From 66553feebae946b4f31ae8d07fd6ae8a44cb3719 Mon Sep 17 00:00:00 2001 From: cjihrig Date: Sat, 19 Jun 2021 18:12:54 -0400 Subject: [PATCH 001/133] src: compare IPv4 addresses in host byte order This commit updates compare_ipv4() to use the host byte order. PR-URL: https://github.com/nodejs/node/pull/39096 Fixes: https://github.com/nodejs/node/issues/39074 Reviewed-By: Khaidi Chu Reviewed-By: Anna Henningsen Reviewed-By: Luigi Pinca --- src/node_sockaddr.cc | 6 ++++-- test/parallel/test-blocklist.js | 21 +++++++++++++++++++++ 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/src/node_sockaddr.cc b/src/node_sockaddr.cc index b15b6ab47194c1..09a74f302923f7 100644 --- a/src/node_sockaddr.cc +++ b/src/node_sockaddr.cc @@ -159,10 +159,12 @@ SocketAddress::CompareResult compare_ipv4( reinterpret_cast(one.data()); const sockaddr_in* two_in = reinterpret_cast(two.data()); + const uint32_t s_addr_one = ntohl(one_in->sin_addr.s_addr); + const uint32_t s_addr_two = ntohl(two_in->sin_addr.s_addr); - if (one_in->sin_addr.s_addr < two_in->sin_addr.s_addr) + if (s_addr_one < s_addr_two) return SocketAddress::CompareResult::LESS_THAN; - else if (one_in->sin_addr.s_addr == two_in->sin_addr.s_addr) + else if (s_addr_one == s_addr_two) return SocketAddress::CompareResult::SAME; else return SocketAddress::CompareResult::GREATER_THAN; diff --git a/test/parallel/test-blocklist.js b/test/parallel/test-blocklist.js index c72d9e30b5f2da..51f19e07bc649c 100644 --- a/test/parallel/test-blocklist.js +++ b/test/parallel/test-blocklist.js @@ -209,6 +209,27 @@ const util = require('util'); assert(!blockList.check('8592:757c:efaf:2fff:ffff:ffff:ffff:ffff', 'ipv6')); } +{ + // Regression test for https://github.com/nodejs/node/issues/39074 + const blockList = new BlockList(); + + blockList.addRange('10.0.0.2', '10.0.0.10'); + + // IPv4 checks against IPv4 range. 
+ assert(blockList.check('10.0.0.2')); + assert(blockList.check('10.0.0.10')); + assert(!blockList.check('192.168.0.3')); + assert(!blockList.check('2.2.2.2')); + assert(!blockList.check('255.255.255.255')); + + // IPv6 checks against IPv4 range. + assert(blockList.check('::ffff:0a00:0002', 'ipv6')); + assert(blockList.check('::ffff:0a00:000a', 'ipv6')); + assert(!blockList.check('::ffff:c0a8:0003', 'ipv6')); + assert(!blockList.check('::ffff:0202:0202', 'ipv6')); + assert(!blockList.check('::ffff:ffff:ffff', 'ipv6')); +} + { const blockList = new BlockList(); assert.throws(() => blockList.addRange('1.1.1.2', '1.1.1.1'), /ERR_INVALID_ARG_VALUE/); From cf0533b8b2cbbd9fdac060f812c99ae664ccf2a6 Mon Sep 17 00:00:00 2001 From: Matheus Marchini Date: Wed, 18 Mar 2020 11:00:02 -0700 Subject: [PATCH 002/133] build: use Actions to validate commit message Actions interface has a better integration with GitHub, and with Annotations and Problem Matcher we can display all failed checks in a single place, so that users don't have to go through the logs to figure out what's wrong. Since the job on Travis was allowed to fail and is not as easy to read, remove it from our Matrix. The Action will check every commit in the Pull Request, skipping commits with "fixup" or "squash". 
PR-URL: https://github.com/nodejs/node/pull/32417 Reviewed-By: Colin Ihrig Reviewed-By: Anna Henningsen Reviewed-By: Rich Trott Reviewed-By: Michael Dawson --- .../commit-lint-problem-matcher.json | 13 ++++++++++++ .github/workflows/commit-lint.yml | 21 +++++++++++++++++++ 2 files changed, 34 insertions(+) create mode 100644 .github/workflows/commit-lint-problem-matcher.json create mode 100644 .github/workflows/commit-lint.yml diff --git a/.github/workflows/commit-lint-problem-matcher.json b/.github/workflows/commit-lint-problem-matcher.json new file mode 100644 index 00000000000000..72dd13b9e0929d --- /dev/null +++ b/.github/workflows/commit-lint-problem-matcher.json @@ -0,0 +1,13 @@ +{ + "problemMatcher": [ + { + "owner": "core-validate-commit", + "pattern": [ + { + "regexp": "^not ok \\d+ (.*)$", + "message": 1 + } + ] + } + ] +} diff --git a/.github/workflows/commit-lint.yml b/.github/workflows/commit-lint.yml new file mode 100644 index 00000000000000..6673f2942da89f --- /dev/null +++ b/.github/workflows/commit-lint.yml @@ -0,0 +1,21 @@ +name: "Commit messages adheres to guidelines at https://goo.gl/p2fr5Q" + +on: [pull_request] + +jobs: + lint-commit-message: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ github.event.pull_request.head.sha }} + # Last 100 commits should be enough for a PR + fetch-depth: 100 + - name: Use Node.js 12 + uses: actions/setup-node@v1 + with: + node-version: 12.x + - name: Validate commit messages + run: | + echo "::add-matcher::.github/workflows/commit-lint-problem-matcher.json" + git log --oneline ${{ github.event.pull_request.base.sha }}..${{ github.event.pull_request.head.sha }} | grep -v -e fixup -e squash | awk '{ print $1 }' | xargs npx -q core-validate-commit --no-validate-metadata --tap From cf8536ea3fe84541b8e4a144f0ed01866962d1f1 Mon Sep 17 00:00:00 2001 From: Mary Marchini Date: Tue, 22 Jun 2021 11:50:34 -0700 Subject: [PATCH 003/133] build: fix commit linter on unrebased PRs The commit 
linter was checking out the PR HEAD commit instead of merge/rebase commit, causing it to fail for any PRs that were not rebased on our default branch. Removing `ref` should fix the issue. PR-URL: https://github.com/nodejs/node/pull/39121 Reviewed-By: Richard Lau Reviewed-By: Antoine du Hamel --- .github/workflows/commit-lint.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/commit-lint.yml b/.github/workflows/commit-lint.yml index 6673f2942da89f..9ea6337b51f181 100644 --- a/.github/workflows/commit-lint.yml +++ b/.github/workflows/commit-lint.yml @@ -8,7 +8,6 @@ jobs: steps: - uses: actions/checkout@v2 with: - ref: ${{ github.event.pull_request.head.sha }} # Last 100 commits should be enough for a PR fetch-depth: 100 - name: Use Node.js 12 From a440f6c69c1adb0c4e5c5d0d5f5084afa35d9b2f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Tue, 22 Jun 2021 10:40:43 +0200 Subject: [PATCH 004/133] doc: fix dead links in packages.md PR-URL: https://github.com/nodejs/node/pull/39113 Reviewed-By: Colin Ihrig Reviewed-By: Antoine du Hamel Reviewed-By: Darshan Sen Reviewed-By: Daijiro Wachi Reviewed-By: Luigi Pinca --- doc/api/packages.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/api/packages.md b/doc/api/packages.md index 4e7c8691a980e7..cd697e9785fded 100644 --- a/doc/api/packages.md +++ b/doc/api/packages.md @@ -610,7 +610,7 @@ Any number of custom conditions can be set with repeat flags. The `"import"`, `"require"`, `"node"` and `"default"` conditions are defined and implemented in Node.js core, -[as specified above](#esm_conditional_exports). +[as specified above](#packages_conditional_exports). Other condition strings are unknown to Node.js and thus ignored by default. Runtimes or tools other than Node.js can use them at their discretion. @@ -805,7 +805,7 @@ The preceding example uses explicit extensions `.mjs` and `.cjs`. 
If your files use the `.js` extension, `"type": "module"` will cause such files to be treated as ES modules, just as `"type": "commonjs"` would cause them to be treated as CommonJS. -See [Enabling](#esm_enabling). +See [Enabling](esm.md#esm_enabling). ```cjs // ./node_modules/pkg/index.cjs From 171ca6bb3c312f93b3491e97c6a1fb8a5bccd6b1 Mon Sep 17 00:00:00 2001 From: Richard Lau Date: Wed, 16 Jun 2021 07:20:43 -0400 Subject: [PATCH 005/133] build: don't pass `--mode` argument to V8 test-runner MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit V8's test-runner dropped the `--mode` argument some time back, and now produces the following error if run with it: run-tests.py: error: no such option: --mode PR-URL: https://github.com/nodejs/node/pull/39055 Refs: https://github.com/nodejs/node/pull/35705 Reviewed-By: Jiawen Geng Reviewed-By: Michaël Zasso Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 58260979e11458..1cfa4ea7137695 100644 --- a/Makefile +++ b/Makefile @@ -667,12 +667,12 @@ test-v8: v8 ## Runs the V8 test suite on deps/v8. 
test-v8-intl: v8 export PATH="$(NO_BIN_OVERRIDE_PATH)" && \ deps/v8/tools/run-tests.py --gn --arch=$(V8_ARCH) \ - --mode=$(BUILDTYPE_LOWER) intl \ + intl \ $(TAP_V8_INTL) test-v8-benchmarks: v8 export PATH="$(NO_BIN_OVERRIDE_PATH)" && \ - deps/v8/tools/run-tests.py --gn --arch=$(V8_ARCH) --mode=$(BUILDTYPE_LOWER) \ + deps/v8/tools/run-tests.py --gn --arch=$(V8_ARCH) \ benchmarks \ $(TAP_V8_BENCHMARKS) From c1588887a6b6a250f8acd53d3f4d2fcdb73007e1 Mon Sep 17 00:00:00 2001 From: Davidson Francis Date: Mon, 21 Jun 2021 00:11:48 -0300 Subject: [PATCH 006/133] doc: fix napi_default_property name MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fix the napi_default_jsproperty flag of the napi_property_attributes enum that was incorrectly referred to as napi_default_property. Signed-off-by: Davidson Francis PR-URL: https://github.com/nodejs/node/pull/39104 Reviewed-By: Chengzhong Wu Reviewed-By: Tobias Nießen Reviewed-By: Gerhard Stöbich --- doc/api/n-api.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index dca306b83cdede..6115bc14674429 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -3798,7 +3798,7 @@ typedef enum { napi_default_method = napi_writable | napi_configurable, // Default for object properties, like in JS obj[prop]. - napi_default_property = napi_writable | + napi_default_jsproperty = napi_writable | napi_enumerable | napi_configurable, } napi_property_attributes; @@ -3821,8 +3821,8 @@ They can be one or more of the following bitflags: [`napi_define_class`][]. It is ignored by `napi_define_properties`. * `napi_default_method`: Like a method in a JS class, the property is configurable and writeable, but not enumerable. -* `napi_default_property`: Like a property set via assignment in JavaScript, the - property is writable, enumerable, and configurable. 
+* `napi_default_jsproperty`: Like a property set via assignment in JavaScript, + the property is writable, enumerable, and configurable. #### napi_property_descriptor From 412b1012d24c187a53a6a69abc3817fd22102053 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Fri, 18 Jun 2021 05:01:41 +0800 Subject: [PATCH 007/133] build: pass directory instead of list of files to js2c.py On Windows there is a limit to the length of commands, so there will be an error once the lengths of the JS file names combined exceed that limit. This patch modifies js2c.py so that it now takes a --directory argument to glob for .js and .mjs files in addition to the list of files passed directly. We still pass the additional files we include from deps/ directly through the command line, as we only includes some of them so we cannot simply glob, but those are limited so listing them out should be fine. Refs: https://docs.microsoft.com/en-us/troubleshoot/windows-client/shell-experience/command-line-string-limitation PR-URL: https://github.com/nodejs/node/pull/39069 Refs: https://github.com/nodejs/node/pull/38971 Reviewed-By: Chengzhong Wu Reviewed-By: James M Snell --- node.gyp | 265 +++--------------------------------------- tools/js2c.py | 14 ++- tools/search_files.py | 22 ++++ tools/utils.py | 9 ++ 4 files changed, 63 insertions(+), 247 deletions(-) create mode 100644 tools/search_files.py diff --git a/node.gyp b/node.gyp index 1de9edc07f610d..bb80fa30bc8fe1 100644 --- a/node.gyp +++ b/node.gyp @@ -27,251 +27,15 @@ 'node_lib_target_name%': 'libnode', 'node_intermediate_lib_type%': 'static_library', 'node_builtin_modules_path%': '', + # We list the deps/ files out instead of globbing them in js2c.py since we + # only include a subset of all the files under these directories. + # The lengths of their file names combined should not exceed the + # Windows command length limit or there would be an error. 
+ # See https://docs.microsoft.com/en-us/troubleshoot/windows-client/shell-experience/command-line-string-limitation 'library_files': [ - 'lib/internal/bootstrap/environment.js', - 'lib/internal/bootstrap/loaders.js', - 'lib/internal/bootstrap/node.js', - 'lib/internal/bootstrap/pre_execution.js', - 'lib/internal/bootstrap/switches/does_own_process_state.js', - 'lib/internal/bootstrap/switches/does_not_own_process_state.js', - 'lib/internal/bootstrap/switches/is_main_thread.js', - 'lib/internal/bootstrap/switches/is_not_main_thread.js', - 'lib/internal/per_context/primordials.js', - 'lib/internal/per_context/domexception.js', - 'lib/internal/per_context/messageport.js', - 'lib/async_hooks.js', - 'lib/assert.js', - 'lib/assert/strict.js', - 'lib/buffer.js', - 'lib/child_process.js', - 'lib/console.js', - 'lib/constants.js', - 'lib/crypto.js', - 'lib/cluster.js', - 'lib/diagnostics_channel.js', - 'lib/dgram.js', - 'lib/dns.js', - 'lib/dns/promises.js', - 'lib/domain.js', - 'lib/events.js', - 'lib/fs.js', - 'lib/fs/promises.js', - 'lib/http.js', - 'lib/http2.js', - 'lib/_http_agent.js', - 'lib/_http_client.js', - 'lib/_http_common.js', - 'lib/_http_incoming.js', - 'lib/_http_outgoing.js', - 'lib/_http_server.js', - 'lib/https.js', - 'lib/inspector.js', - 'lib/module.js', - 'lib/net.js', - 'lib/os.js', - 'lib/path.js', - 'lib/path/posix.js', - 'lib/path/win32.js', - 'lib/perf_hooks.js', - 'lib/process.js', - 'lib/punycode.js', - 'lib/querystring.js', - 'lib/readline.js', - 'lib/repl.js', - 'lib/stream.js', - 'lib/stream/promises.js', - 'lib/_stream_readable.js', - 'lib/_stream_writable.js', - 'lib/_stream_duplex.js', - 'lib/_stream_transform.js', - 'lib/_stream_passthrough.js', - 'lib/_stream_wrap.js', - 'lib/string_decoder.js', - 'lib/sys.js', - 'lib/timers/promises.js', - 'lib/timers.js', - 'lib/tls.js', - 'lib/_tls_common.js', - 'lib/_tls_wrap.js', - 'lib/trace_events.js', - 'lib/tty.js', - 'lib/url.js', - 'lib/util.js', - 'lib/util/types.js', - 'lib/v8.js', - 
'lib/vm.js', - 'lib/wasi.js', - 'lib/worker_threads.js', - 'lib/zlib.js', - 'lib/internal/abort_controller.js', - 'lib/internal/assert.js', - 'lib/internal/assert/assertion_error.js', - 'lib/internal/assert/calltracker.js', - 'lib/internal/async_hooks.js', - 'lib/internal/blob.js', - 'lib/internal/blocklist.js', - 'lib/internal/buffer.js', - 'lib/internal/cli_table.js', - 'lib/internal/child_process.js', - 'lib/internal/child_process/serialization.js', - 'lib/internal/cluster/child.js', - 'lib/internal/cluster/primary.js', - 'lib/internal/cluster/round_robin_handle.js', - 'lib/internal/cluster/shared_handle.js', - 'lib/internal/cluster/utils.js', - 'lib/internal/cluster/worker.js', - 'lib/internal/console/constructor.js', - 'lib/internal/console/global.js', - 'lib/internal/crypto/aes.js', - 'lib/internal/crypto/certificate.js', - 'lib/internal/crypto/cipher.js', - 'lib/internal/crypto/diffiehellman.js', - 'lib/internal/crypto/dsa.js', - 'lib/internal/crypto/ec.js', - 'lib/internal/crypto/hash.js', - 'lib/internal/crypto/hashnames.js', - 'lib/internal/crypto/hkdf.js', - 'lib/internal/crypto/keygen.js', - 'lib/internal/crypto/keys.js', - 'lib/internal/crypto/mac.js', - 'lib/internal/crypto/pbkdf2.js', - 'lib/internal/crypto/random.js', - 'lib/internal/crypto/rsa.js', - 'lib/internal/crypto/scrypt.js', - 'lib/internal/crypto/sig.js', - 'lib/internal/crypto/util.js', - 'lib/internal/crypto/webcrypto.js', - 'lib/internal/crypto/x509.js', - 'lib/internal/constants.js', - 'lib/internal/debugger/_inspect.js', - 'lib/internal/debugger/inspect_client.js', - 'lib/internal/debugger/inspect_repl.js', - 'lib/internal/dgram.js', - 'lib/internal/dns/promises.js', - 'lib/internal/dns/utils.js', - 'lib/internal/dtrace.js', - 'lib/internal/encoding.js', - 'lib/internal/errors.js', - 'lib/internal/error_serdes.js', - 'lib/internal/event_target.js', - 'lib/internal/fixed_queue.js', - 'lib/internal/freelist.js', - 'lib/internal/freeze_intrinsics.js', - 'lib/internal/fs/dir.js', - 
'lib/internal/fs/promises.js', - 'lib/internal/fs/read_file_context.js', - 'lib/internal/fs/rimraf.js', - 'lib/internal/fs/streams.js', - 'lib/internal/fs/sync_write_stream.js', - 'lib/internal/fs/utils.js', - 'lib/internal/fs/watchers.js', - 'lib/internal/http.js', - 'lib/internal/heap_utils.js', - 'lib/internal/histogram.js', - 'lib/internal/idna.js', - 'lib/internal/inspector_async_hook.js', - 'lib/internal/js_stream_socket.js', - 'lib/internal/legacy/processbinding.js', - 'lib/internal/linkedlist.js', - 'lib/internal/main/check_syntax.js', - 'lib/internal/main/eval_string.js', - 'lib/internal/main/eval_stdin.js', - 'lib/internal/main/inspect.js', - 'lib/internal/main/print_help.js', - 'lib/internal/main/prof_process.js', - 'lib/internal/main/repl.js', - 'lib/internal/main/run_main_module.js', - 'lib/internal/main/worker_thread.js', - 'lib/internal/modules/run_main.js', - 'lib/internal/modules/package_json_reader.js', - 'lib/internal/modules/cjs/helpers.js', - 'lib/internal/modules/cjs/loader.js', - 'lib/internal/modules/esm/loader.js', - 'lib/internal/modules/esm/create_dynamic_module.js', - 'lib/internal/modules/esm/get_format.js', - 'lib/internal/modules/esm/get_source.js', - 'lib/internal/modules/esm/module_job.js', - 'lib/internal/modules/esm/module_map.js', - 'lib/internal/modules/esm/resolve.js', - 'lib/internal/modules/esm/transform_source.js', - 'lib/internal/modules/esm/translators.js', - 'lib/internal/net.js', - 'lib/internal/options.js', - 'lib/internal/perf/perf.js', - 'lib/internal/perf/nodetiming.js', - 'lib/internal/perf/usertiming.js', - 'lib/internal/perf/observe.js', - 'lib/internal/perf/event_loop_delay.js', - 'lib/internal/perf/event_loop_utilization.js', - 'lib/internal/perf/timerify.js', - 'lib/internal/policy/manifest.js', - 'lib/internal/policy/sri.js', - 'lib/internal/priority_queue.js', - 'lib/internal/process/esm_loader.js', - 'lib/internal/process/execution.js', - 'lib/internal/process/per_thread.js', - 
'lib/internal/process/policy.js', - 'lib/internal/process/promises.js', - 'lib/internal/process/warning.js', - 'lib/internal/process/worker_thread_only.js', - 'lib/internal/process/report.js', - 'lib/internal/process/signal.js', - 'lib/internal/process/task_queues.js', - 'lib/internal/querystring.js', - 'lib/internal/readline/callbacks.js', - 'lib/internal/readline/emitKeypressEvents.js', - 'lib/internal/readline/utils.js', - 'lib/internal/repl.js', - 'lib/internal/repl/await.js', - 'lib/internal/repl/history.js', - 'lib/internal/repl/utils.js', - 'lib/internal/socketaddress.js', - 'lib/internal/socket_list.js', - 'lib/internal/source_map/prepare_stack_trace.js', - 'lib/internal/source_map/source_map.js', - 'lib/internal/source_map/source_map_cache.js', - 'lib/internal/test/binding.js', - 'lib/internal/test/transfer.js', - 'lib/internal/timers.js', - 'lib/internal/tls.js', - 'lib/internal/trace_events_async_hooks.js', - 'lib/internal/tty.js', - 'lib/internal/url.js', - 'lib/internal/util.js', - 'lib/internal/util/comparisons.js', - 'lib/internal/util/debuglog.js', - 'lib/internal/util/inspect.js', - 'lib/internal/util/inspector.js', - 'lib/internal/util/iterable_weak_map.js', - 'lib/internal/util/types.js', - 'lib/internal/http2/core.js', - 'lib/internal/http2/compat.js', - 'lib/internal/http2/util.js', - 'lib/internal/v8_prof_polyfill.js', - 'lib/internal/v8_prof_processor.js', - 'lib/internal/validators.js', - 'lib/internal/stream_base_commons.js', - 'lib/internal/vm/module.js', - 'lib/internal/worker.js', - 'lib/internal/worker/io.js', - 'lib/internal/worker/js_transferable.js', - 'lib/internal/watchdog.js', - 'lib/internal/streams/lazy_transform.js', - 'lib/internal/streams/add-abort-signal.js', - 'lib/internal/streams/buffer_list.js', - 'lib/internal/streams/duplexpair.js', - 'lib/internal/streams/from.js', - 'lib/internal/streams/legacy.js', - 'lib/internal/streams/readable.js', - 'lib/internal/streams/writable.js', - 'lib/internal/streams/duplex.js', - 
'lib/internal/streams/passthrough.js', - 'lib/internal/streams/transform.js', - 'lib/internal/streams/destroy.js', - 'lib/internal/streams/state.js', - 'lib/internal/streams/pipeline.js', - 'lib/internal/streams/end-of-stream.js', - 'lib/internal/streams/utils.js', + ' Date: Tue, 15 Jun 2021 10:09:29 -0700 Subject: [PATCH 008/133] doc: esm examples /w imports for process, Buffer PR-URL: https://github.com/nodejs/node/pull/39043 Reviewed-By: Bradley Farias --- .eslintrc.js | 8 + doc/api/assert.md | 1 + doc/api/async_context.md | 205 +++- doc/api/async_hooks.md | 211 +++- doc/api/buffer.md | 1819 ++++++++++++++++++++++++++++++-- doc/api/cluster.md | 199 +++- doc/api/crypto.md | 197 ++-- doc/api/dgram.md | 116 +- doc/api/diagnostics_channel.md | 98 +- doc/api/esm.md | 7 +- doc/api/fs.md | 5 + doc/api/process.md | 1187 ++++++++++++++++++--- doc/api/wasi.md | 11 +- 13 files changed, 3659 insertions(+), 405 deletions(-) diff --git a/.eslintrc.js b/.eslintrc.js index bda641797e2951..f5100fd3cc91b8 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -98,6 +98,14 @@ module.exports = { name: 'require', message: 'Use import instead', }, + { + name: 'Buffer', + message: 'Import Buffer instead of using the global' + }, + { + name: 'process', + message: 'Import process instead of using the global' + }, ] }, }, ], diff --git a/doc/api/assert.md b/doc/api/assert.md index 7eb952ace7be71..037d233be0acd9 100644 --- a/doc/api/assert.md +++ b/doc/api/assert.md @@ -240,6 +240,7 @@ for the verification to take place. The usual pattern would be to call it in a ```mjs import assert from 'assert'; +import process from 'process'; const tracker = new assert.CallTracker(); diff --git a/doc/api/async_context.md b/doc/api/async_context.md index 0caf1d6bb62078..20cf08c06b13a4 100644 --- a/doc/api/async_context.md +++ b/doc/api/async_context.md @@ -14,7 +14,11 @@ in other languages. 
The `AsyncLocalStorage` and `AsyncResource` classes are part of the `async_hooks` module: -```js +```mjs +import async_hooks from 'async_hooks'; +``` + +```cjs const async_hooks = require('async_hooks'); ``` @@ -40,7 +44,39 @@ The following example uses `AsyncLocalStorage` to build a simple logger that assigns IDs to incoming HTTP requests and includes them in messages logged within each request. -```js +```mjs +import http from 'http'; +import { AsyncLocalStorage } from 'async_hooks'; + +const asyncLocalStorage = new AsyncLocalStorage(); + +function logWithId(msg) { + const id = asyncLocalStorage.getStore(); + console.log(`${id !== undefined ? id : '-'}:`, msg); +} + +let idSeq = 0; +http.createServer((req, res) => { + asyncLocalStorage.run(idSeq++, () => { + logWithId('start'); + // Imagine any chain of async operations here + setImmediate(() => { + logWithId('finish'); + res.end(); + }); + }); +}).listen(8080); + +http.get('http://localhost:8080'); +http.get('http://localhost:8080'); +// Prints: +// 0: start +// 1: start +// 0: finish +// 1: finish +``` + +```cjs const http = require('http'); const { AsyncLocalStorage } = require('async_hooks'); @@ -299,7 +335,35 @@ The `init` hook will trigger when an `AsyncResource` is instantiated. The following is an overview of the `AsyncResource` API. -```js +```mjs +import { AsyncResource, executionAsyncId } from 'async_hooks'; + +// AsyncResource() is meant to be extended. Instantiating a +// new AsyncResource() also triggers init. If triggerAsyncId is omitted then +// async_hook.executionAsyncId() is used. +const asyncResource = new AsyncResource( + type, { triggerAsyncId: executionAsyncId(), requireManualDestroy: false } +); + +// Run a function in the execution context of the resource. 
This will +// * establish the context of the resource +// * trigger the AsyncHooks before callbacks +// * call the provided function `fn` with the supplied arguments +// * trigger the AsyncHooks after callbacks +// * restore the original execution context +asyncResource.runInAsyncScope(fn, thisArg, ...args); + +// Call AsyncHooks destroy callbacks. +asyncResource.emitDestroy(); + +// Return the unique ID assigned to the AsyncResource instance. +asyncResource.asyncId(); + +// Return the trigger ID for the AsyncResource instance. +asyncResource.triggerAsyncId(); +``` + +```cjs const { AsyncResource, executionAsyncId } = require('async_hooks'); // AsyncResource() is meant to be extended. Instantiating a @@ -446,7 +510,14 @@ database connection pools, can follow a similar model. Assuming that the task is adding two numbers, using a file named `task_processor.js` with the following content: -```js +```mjs +import { parentPort } from 'worker_threads'; +parentPort.on('message', (task) => { + parentPort.postMessage(task.a + task.b); +}); +``` + +```cjs const { parentPort } = require('worker_threads'); parentPort.on('message', (task) => { parentPort.postMessage(task.a + task.b); @@ -455,7 +526,95 @@ parentPort.on('message', (task) => { a Worker pool around it could use the following structure: -```js +```mjs +import { AsyncResource } from 'async_hooks'; +import { EventEmitter } from 'events'; +import path from 'path'; +import { Worker } from 'worker_threads'; + +const kTaskInfo = Symbol('kTaskInfo'); +const kWorkerFreedEvent = Symbol('kWorkerFreedEvent'); + +class WorkerPoolTaskInfo extends AsyncResource { + constructor(callback) { + super('WorkerPoolTaskInfo'); + this.callback = callback; + } + + done(err, result) { + this.runInAsyncScope(this.callback, null, err, result); + this.emitDestroy(); // `TaskInfo`s are used only once. 
+ } +} + +export default class WorkerPool extends EventEmitter { + constructor(numThreads) { + super(); + this.numThreads = numThreads; + this.workers = []; + this.freeWorkers = []; + this.tasks = []; + + for (let i = 0; i < numThreads; i++) + this.addNewWorker(); + + // Any time the kWorkerFreedEvent is emitted, dispatch + // the next task pending in the queue, if any. + this.on(kWorkerFreedEvent, () => { + if (this.tasks.length > 0) { + const { task, callback } = this.tasks.shift(); + this.runTask(task, callback); + } + }); + } + + addNewWorker() { + const worker = new Worker(new URL('task_processer.js', import.meta.url)); + worker.on('message', (result) => { + // In case of success: Call the callback that was passed to `runTask`, + // remove the `TaskInfo` associated with the Worker, and mark it as free + // again. + worker[kTaskInfo].done(null, result); + worker[kTaskInfo] = null; + this.freeWorkers.push(worker); + this.emit(kWorkerFreedEvent); + }); + worker.on('error', (err) => { + // In case of an uncaught exception: Call the callback that was passed to + // `runTask` with the error. + if (worker[kTaskInfo]) + worker[kTaskInfo].done(err, null); + else + this.emit('error', err); + // Remove the worker from the list and start a new Worker to replace the + // current one. + this.workers.splice(this.workers.indexOf(worker), 1); + this.addNewWorker(); + }); + this.workers.push(worker); + this.freeWorkers.push(worker); + this.emit(kWorkerFreedEvent); + } + + runTask(task, callback) { + if (this.freeWorkers.length === 0) { + // No free threads, wait until a worker thread becomes free. 
+ this.tasks.push({ task, callback }); + return; + } + + const worker = this.freeWorkers.pop(); + worker[kTaskInfo] = new WorkerPoolTaskInfo(callback); + worker.postMessage(task); + } + + close() { + for (const worker of this.workers) worker.terminate(); + } +} +``` + +```cjs const { AsyncResource } = require('async_hooks'); const { EventEmitter } = require('events'); const path = require('path'); @@ -553,7 +712,23 @@ were scheduled. This pool could be used as follows: -```js +```mjs +import WorkerPool from './worker_pool.js'; +import os from 'os'; + +const pool = new WorkerPool(os.cpus().length); + +let finished = 0; +for (let i = 0; i < 10; i++) { + pool.runTask({ a: 42, b: 100 }, (err, result) => { + console.log(i, err, result); + if (++finished === 10) + pool.close(); + }); +} +``` + +```cjs const WorkerPool = require('./worker_pool.js'); const os = require('os'); @@ -579,7 +754,22 @@ The following example shows how to use the `AsyncResource` class to properly associate an event listener with the correct execution context. The same approach can be applied to a [`Stream`][] or a similar event-driven class. -```js +```mjs +import { createServer } from 'http'; +import { AsyncResource, executionAsyncId } from 'async_hooks'; + +const server = createServer((req, res) => { + req.on('close', AsyncResource.bind(() => { + // Execution context is bound to the current outer scope. + })); + req.on('close', () => { + // Execution context is bound to the scope that caused 'close' to emit. 
+ }); + res.end(); +}).listen(3000); +``` + +```cjs const { createServer } = require('http'); const { AsyncResource, executionAsyncId } = require('async_hooks'); @@ -593,6 +783,7 @@ const server = createServer((req, res) => { res.end(); }).listen(3000); ``` + [`AsyncResource`]: #async_context_class_asyncresource [`EventEmitter`]: events.md#events_class_eventemitter [`Stream`]: stream.md#stream_stream diff --git a/doc/api/async_hooks.md b/doc/api/async_hooks.md index f6805102fdf050..e09e89fc377cbb 100644 --- a/doc/api/async_hooks.md +++ b/doc/api/async_hooks.md @@ -9,7 +9,11 @@ The `async_hooks` module provides an API to track asynchronous resources. It can be accessed using: -```js +```mjs +import async_hooks from 'async_hooks'; +``` + +```cjs const async_hooks = require('async_hooks'); ``` @@ -29,7 +33,55 @@ interface, and each thread will use a new set of async IDs. Following is a simple overview of the public API. -```js +```mjs +import async_hooks from 'async_hooks'; + +// Return the ID of the current execution context. +const eid = async_hooks.executionAsyncId(); + +// Return the ID of the handle responsible for triggering the callback of the +// current execution scope to call. +const tid = async_hooks.triggerAsyncId(); + +// Create a new AsyncHook instance. All of these callbacks are optional. +const asyncHook = + async_hooks.createHook({ init, before, after, destroy, promiseResolve }); + +// Allow callbacks of this AsyncHook instance to call. This is not an implicit +// action after running the constructor, and must be explicitly run to begin +// executing callbacks. +asyncHook.enable(); + +// Disable listening for new asynchronous events. +asyncHook.disable(); + +// +// The following are the callbacks that can be passed to createHook(). +// + +// init is called during object construction. 
The resource may not have +// completed construction when this callback runs, therefore all fields of the +// resource referenced by "asyncId" may not have been populated. +function init(asyncId, type, triggerAsyncId, resource) { } + +// Before is called just before the resource's callback is called. It can be +// called 0-N times for handles (such as TCPWrap), and will be called exactly 1 +// time for requests (such as FSReqCallback). +function before(asyncId) { } + +// After is called just after the resource's callback has finished. +function after(asyncId) { } + +// Destroy is called when the resource is destroyed. +function destroy(asyncId) { } + +// promiseResolve is called only for promise resources, when the +// `resolve` function passed to the `Promise` constructor is invoked +// (either directly or through other means of resolving a promise). +function promiseResolve(asyncId) { } +``` + +```cjs const async_hooks = require('async_hooks'); // Return the ID of the current execution context. @@ -102,7 +154,16 @@ be tracked, then only the `destroy` callback needs to be passed. The specifics of all functions that can be passed to `callbacks` is in the [Hook Callbacks][] section. -```js +```mjs +import { createHook } from 'async_hooks'; + +const asyncHook = createHook({ + init(asyncId, type, triggerAsyncId, resource) { }, + destroy(asyncId) { } +}); +``` + +```cjs const async_hooks = require('async_hooks'); const asyncHook = async_hooks.createHook({ @@ -158,7 +219,17 @@ synchronous logging operation such as `fs.writeFileSync(file, msg, flag)`. This will print to the file and will not invoke AsyncHooks recursively because it is synchronous. 
-```js +```mjs +import { writeFileSync } from 'fs'; +import { format } from 'util'; + +function debug(...args) { + // Use a function like this one when debugging inside an AsyncHooks callback + writeFileSync('log.out', `${format(...args)}\n`, { flag: 'a' }); +} +``` + +```cjs const fs = require('fs'); const util = require('util'); @@ -189,7 +260,13 @@ provided, enabling is a no-op. The `AsyncHook` instance is disabled by default. If the `AsyncHook` instance should be enabled immediately after creation, the following pattern can be used. -```js +```mjs +import { createHook } from 'async_hooks'; + +const hook = createHook(callbacks).enable(); +``` + +```cjs const async_hooks = require('async_hooks'); const hook = async_hooks.createHook(callbacks).enable(); @@ -229,7 +306,15 @@ This behavior can be observed by doing something like opening a resource then closing it before the resource can be used. The following snippet demonstrates this. -```js +```mjs +import { createServer } from 'net'; + +createServer().listen(function() { this.close(); }); +// OR +clearTimeout(setTimeout(() => {}, 10)); +``` + +```cjs require('net').createServer().listen(function() { this.close(); }); // OR clearTimeout(setTimeout(() => {}, 10)); @@ -270,12 +355,31 @@ created, while `triggerAsyncId` shows *why* a resource was created. 
The following is a simple demonstration of `triggerAsyncId`:

-```js
-const { fd } = process.stdout;
+```mjs
+import { createHook, executionAsyncId } from 'async_hooks';
+import { stdout } from 'process';
+import net from 'net';
 
-async_hooks.createHook({
+createHook({
   init(asyncId, type, triggerAsyncId) {
-    const eid = async_hooks.executionAsyncId();
+    const eid = executionAsyncId();
+    fs.writeSync(
+      stdout.fd,
+      `${type}(${asyncId}): trigger: ${triggerAsyncId} execution: ${eid}\n`);
+  }
+}).enable();
+
+net.createServer((conn) => {}).listen(8080);
+```
+
+```cjs
+const { createHook, executionAsyncId } = require('async_hooks');
+const { fd } = require('process').stdout;
+const net = require('net');
+
+createHook({
+  init(asyncId, type, triggerAsyncId) {
+    const eid = executionAsyncId();
     fs.writeSync(
       fd,
       `${type}(${asyncId}): trigger: ${triggerAsyncId} execution: ${eid}\n`);
@@ -506,7 +610,17 @@ Using `executionAsyncResource()` in the top-level execution context will
return an empty object as there is no handle or request object to use,
but having an object representing the top-level can be helpful.
-```js +```mjs +import { open } from 'fs'; +import { executionAsyncId, executionAsyncResource } from 'async_hooks'; + +console.log(executionAsyncId(), executionAsyncResource()); // 1 {} +open(new URL(import.meta.url), 'r', (err, fd) => { + console.log(executionAsyncId(), executionAsyncResource()); // 7 FSReqWrap +}); +``` + +```cjs const { open } = require('fs'); const { executionAsyncId, executionAsyncResource } = require('async_hooks'); @@ -519,7 +633,33 @@ open(__filename, 'r', (err, fd) => { This can be used to implement continuation local storage without the use of a tracking `Map` to store the metadata: -```js +```mjs +import { createServer } from 'http'; +import { + executionAsyncId, + executionAsyncResource, + createHook +} from 'async_hooks'; +const sym = Symbol('state'); // Private symbol to avoid pollution + +createHook({ + init(asyncId, type, triggerAsyncId, resource) { + const cr = executionAsyncResource(); + if (cr) { + resource[sym] = cr[sym]; + } + } +}).enable(); + +const server = createServer((req, res) => { + executionAsyncResource()[sym] = { state: req.url }; + setTimeout(function() { + res.end(JSON.stringify(executionAsyncResource()[sym])); + }, 100); +}).listen(3000); +``` + +```cjs const { createServer } = require('http'); const { executionAsyncId, @@ -558,7 +698,16 @@ changes: * Returns: {number} The `asyncId` of the current execution context. Useful to track when something calls. -```js +```mjs +import { executionAsyncId } from 'async_hooks'; + +console.log(executionAsyncId()); // 1 - bootstrap +fs.open(path, 'r', (err, fd) => { + console.log(executionAsyncId()); // 6 - open() +}); +``` + +```cjs const async_hooks = require('async_hooks'); console.log(async_hooks.executionAsyncId()); // 1 - bootstrap @@ -616,10 +765,21 @@ expensive nature of the [promise introspection API][PromiseHooks] provided by V8. 
This means that programs using promises or `async`/`await` will not get correct
execution and trigger ids for promise callback contexts by default.

-```js
-const ah = require('async_hooks');
+```mjs
+import { executionAsyncId, triggerAsyncId } from 'async_hooks';
+
+Promise.resolve(1729).then(() => {
+  console.log(`eid ${executionAsyncId()} tid ${triggerAsyncId()}`);
+});
+// produces:
+// eid 1 tid 0
+```
+
+```cjs
+const { executionAsyncId, triggerAsyncId } = require('async_hooks');
+
 Promise.resolve(1729).then(() => {
-  console.log(`eid ${ah.executionAsyncId()} tid ${ah.triggerAsyncId()}`);
+  console.log(`eid ${executionAsyncId()} tid ${triggerAsyncId()}`);
 });
 // produces:
 // eid 1 tid 0
@@ -633,11 +793,22 @@ the resource that caused (triggered) the `then()` callback to be executed.
 
 Installing async hooks via `async_hooks.createHook` enables promise execution
 tracking:
 
-```js
-const ah = require('async_hooks');
-ah.createHook({ init() {} }).enable(); // forces PromiseHooks to be enabled.
+```mjs
+import { createHook, executionAsyncId, triggerAsyncId } from 'async_hooks';
+createHook({ init() {} }).enable(); // forces PromiseHooks to be enabled.
+Promise.resolve(1729).then(() => {
+  console.log(`eid ${executionAsyncId()} tid ${triggerAsyncId()}`);
+});
+// produces:
+// eid 7 tid 6
+```
+
+```cjs
+const { createHook, executionAsyncId, triggerAsyncId } = require('async_hooks');
+
+createHook({ init() {} }).enable(); // forces PromiseHooks to be enabled.
 Promise.resolve(1729).then(() => {
-  console.log(`eid ${ah.executionAsyncId()} tid ${ah.triggerAsyncId()}`);
+  console.log(`eid ${executionAsyncId()} tid ${triggerAsyncId()}`);
 });
 // produces:
 // eid 7 tid 6
diff --git a/doc/api/buffer.md b/doc/api/buffer.md
index 1ad3bf7cb78b09..d762b4b0e3ae91 100644
--- a/doc/api/buffer.md
+++ b/doc/api/buffer.md
@@ -13,10 +13,45 @@
The `Buffer` class is a subclass of JavaScript's [`Uint8Array`][] class and
extends it with methods that cover additional use cases.
Node.js APIs accept plain [`Uint8Array`][]s wherever `Buffer`s are supported as well. -The `Buffer` class is within the global scope, making it unlikely that one -would need to ever use `require('buffer').Buffer`. +While the `Buffer` class is available within the global scope, it is still +recommended to explicitly reference it via an import or require statement. + +```mjs +import { Buffer } from 'buffer'; + +// Creates a zero-filled Buffer of length 10. +const buf1 = Buffer.alloc(10); + +// Creates a Buffer of length 10, +// filled with bytes which all have the value `1`. +const buf2 = Buffer.alloc(10, 1); + +// Creates an uninitialized buffer of length 10. +// This is faster than calling Buffer.alloc() but the returned +// Buffer instance might contain old data that needs to be +// overwritten using fill(), write(), or other functions that fill the Buffer's +// contents. +const buf3 = Buffer.allocUnsafe(10); + +// Creates a Buffer containing the bytes [1, 2, 3]. +const buf4 = Buffer.from([1, 2, 3]); + +// Creates a Buffer containing the bytes [1, 1, 1, 1] – the entries +// are all truncated using `(value & 255)` to fit into the range 0–255. +const buf5 = Buffer.from([257, 257.5, -255, '1']); + +// Creates a Buffer containing the UTF-8-encoded bytes for the string 'tést': +// [0x74, 0xc3, 0xa9, 0x73, 0x74] (in hexadecimal notation) +// [116, 195, 169, 115, 116] (in decimal notation) +const buf6 = Buffer.from('tést'); + +// Creates a Buffer containing the Latin-1 bytes [0x74, 0xe9, 0x73, 0x74]. +const buf7 = Buffer.from('tést', 'latin1'); +``` + +```cjs +const { Buffer } = require('buffer'); -```js // Creates a zero-filled Buffer of length 10. const buf1 = Buffer.alloc(10); @@ -65,7 +100,25 @@ When converting between `Buffer`s and strings, a character encoding may be specified. If no character encoding is specified, UTF-8 will be used as the default. 
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from('hello world', 'utf8'); + +console.log(buf.toString('hex')); +// Prints: 68656c6c6f20776f726c64 +console.log(buf.toString('base64')); +// Prints: aGVsbG8gd29ybGQ= + +console.log(Buffer.from('fhqwhgads', 'utf8')); +// Prints: +console.log(Buffer.from('fhqwhgads', 'utf16le')); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from('hello world', 'utf8'); console.log(buf.toString('hex')); @@ -142,7 +195,23 @@ The following legacy character encodings are also supported: of UTF-16 that did not support characters that had code points larger than U+FFFF. In Node.js, these code points are always supported. -```js +```mjs +import { Buffer } from 'buffer'; + +Buffer.from('1ag', 'hex'); +// Prints , data truncated when first non-hexadecimal value +// ('g') encountered. + +Buffer.from('1a7g', 'hex'); +// Prints , data truncated when data ends in single digit ('7'). + +Buffer.from('1634', 'hex'); +// Prints , all data represented. +``` + +```cjs +const { Buffer } = require('buffer'); + Buffer.from('1ag', 'hex'); // Prints , data truncated when first non-hexadecimal value // ('g') encountered. @@ -191,7 +260,20 @@ There are two ways to create new [`TypedArray`][] instances from a `Buffer`: contents, interpreted as an array of integers, and not as a byte sequence of the target type. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([1, 2, 3, 4]); +const uint32array = new Uint32Array(buf); + +console.log(uint32array); + +// Prints: Uint32Array(4) [ 1, 2, 3, 4 ] +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([1, 2, 3, 4]); const uint32array = new Uint32Array(buf); @@ -203,7 +285,23 @@ console.log(uint32array); * Passing the `Buffer`s underlying [`ArrayBuffer`][] will create a [`TypedArray`][] that shares its memory with the `Buffer`. 
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from('hello', 'utf16le'); +const uint16array = new Uint16Array( + buf.buffer, + buf.byteOffset, + buf.length / Uint16Array.BYTES_PER_ELEMENT); + +console.log(uint16array); + +// Prints: Uint16Array(5) [ 104, 101, 108, 108, 111 ] +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from('hello', 'utf16le'); const uint16array = new Uint16Array( buf.buffer, @@ -220,7 +318,36 @@ memory as a [`TypedArray`][] instance by using the `TypedArray` object’s `.buffer` property in the same way. [`Buffer.from()`][`Buffer.from(arrayBuf)`] behaves like `new Uint8Array()` in this context. -```js +```mjs +import { Buffer } from 'buffer'; + +const arr = new Uint16Array(2); + +arr[0] = 5000; +arr[1] = 4000; + +// Copies the contents of `arr`. +const buf1 = Buffer.from(arr); + +// Shares memory with `arr`. +const buf2 = Buffer.from(arr.buffer); + +console.log(buf1); +// Prints: +console.log(buf2); +// Prints: + +arr[1] = 6000; + +console.log(buf1); +// Prints: +console.log(buf2); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const arr = new Uint16Array(2); arr[0] = 5000; @@ -249,7 +376,19 @@ When creating a `Buffer` using a [`TypedArray`][]'s `.buffer`, it is possible to use only a portion of the underlying [`ArrayBuffer`][] by passing in `byteOffset` and `length` parameters. 
-```js
+```mjs
+import { Buffer } from 'buffer';
+
+const arr = new Uint16Array(20);
+const buf = Buffer.from(arr.buffer, 0, 16);
+
+console.log(buf.length);
+// Prints: 16
+```
+
+```cjs
+const { Buffer } = require('buffer');
+
 const arr = new Uint16Array(20);
 const buf = Buffer.from(arr.buffer, 0, 16);
 
 console.log(buf.length);
 // Prints: 16
@@ -276,7 +415,23 @@ function:
 
 `Buffer` instances can be iterated over using `for..of` syntax:
 
-```js
+```mjs
+import { Buffer } from 'buffer';
+
+const buf = Buffer.from([1, 2, 3]);
+
+for (const b of buf) {
+  console.log(b);
+}
+// Prints:
+// 1
+// 2
+// 3
+```
+
+```cjs
+const { Buffer } = require('buffer');
+
 const buf = Buffer.from([1, 2, 3]);
 
 for (const b of buf) {
@@ -378,11 +533,39 @@ destinations without transferring or immediately copying the data. The data
contained by the `Blob` is copied only when the `arrayBuffer()` or `text()`
methods are called.

-```js
-const { Blob } = require('buffer');
+```mjs
+import { Blob, Buffer } from 'buffer';
+import { setTimeout as delay } from 'timers/promises';
+
 const blob = new Blob(['hello there']);
+
+const mc1 = new MessageChannel();
+const mc2 = new MessageChannel();
+
+mc1.port1.onmessage = async ({ data }) => {
+  console.log(await data.arrayBuffer());
+  mc1.port1.close();
+};
+
+mc2.port1.onmessage = async ({ data }) => {
+  await delay(1000);
+  console.log(await data.arrayBuffer());
+  mc2.port1.close();
+};
+
+mc1.port2.postMessage(blob);
+mc2.port2.postMessage(blob);
+
+// The Blob is still usable after posting.
+blob.text().then(console.log);
+```
+
+```cjs
+const { Blob, Buffer } = require('buffer');
 const { setTimeout: delay } = require('timers/promises');
 
+const blob = new Blob(['hello there']);
+
 const mc1 = new MessageChannel();
 const mc2 = new MessageChannel();
 
@@ -440,7 +623,18 @@ changes:
 
Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the
`Buffer` will be zero-filled.
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.alloc(5); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.alloc(5); console.log(buf); @@ -454,7 +648,18 @@ is thrown. If `fill` is specified, the allocated `Buffer` will be initialized by calling [`buf.fill(fill)`][`buf.fill()`]. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.alloc(5, 'a'); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.alloc(5, 'a'); console.log(buf); @@ -464,7 +669,18 @@ console.log(buf); If both `fill` and `encoding` are specified, the allocated `Buffer` will be initialized by calling [`buf.fill(fill, encoding)`][`buf.fill()`]. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); console.log(buf); @@ -502,7 +718,23 @@ initialized*. The contents of the newly created `Buffer` are unknown and *may contain sensitive data*. Use [`Buffer.alloc()`][] instead to initialize `Buffer` instances with zeroes. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(10); + +console.log(buf); +// Prints (contents may vary): + +buf.fill(0); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(10); console.log(buf); @@ -564,7 +796,29 @@ memory from a pool for an indeterminate amount of time, it may be appropriate to create an un-pooled `Buffer` instance using `Buffer.allocUnsafeSlow()` and then copying out the relevant bits. -```js +```mjs +import { Buffer } from 'buffer'; + +// Need to keep around a few small chunks of memory. 
+const store = []; + +socket.on('readable', () => { + let data; + while (null !== (data = readable.read())) { + // Allocate for retained data. + const sb = Buffer.allocUnsafeSlow(10); + + // Copy the data into the new allocation. + data.copy(sb, 0, 0, 10); + + store.push(sb); + } +}); +``` + +```cjs +const { Buffer } = require('buffer'); + // Need to keep around a few small chunks of memory. const store = []; @@ -612,7 +866,19 @@ For strings that contain non-base64/hex-encoded data (e.g. whitespace), the return value might be greater than the length of a `Buffer` created from the string. -```js +```mjs +import { Buffer } from 'buffer'; + +const str = '\u00bd + \u00bc = \u00be'; + +console.log(`${str}: ${str.length} characters, ` + + `${Buffer.byteLength(str, 'utf8')} bytes`); +// Prints: ½ + ¼ = ¾: 9 characters, 12 bytes +``` + +```cjs +const { Buffer } = require('buffer'); + const str = '\u00bd + \u00bc = \u00be'; console.log(`${str}: ${str.length} characters, ` + @@ -642,7 +908,21 @@ Compares `buf1` to `buf2`, typically for the purpose of sorting arrays of `Buffer` instances. This is equivalent to calling [`buf1.compare(buf2)`][`buf.compare()`]. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf1 = Buffer.from('1234'); +const buf2 = Buffer.from('0123'); +const arr = [buf1, buf2]; + +console.log(arr.sort(Buffer.compare)); +// Prints: [ , ] +// (This result is equal to: [buf2, buf1].) +``` + +```cjs +const { Buffer } = require('buffer'); + const buf1 = Buffer.from('1234'); const buf2 = Buffer.from('0123'); const arr = [buf1, buf2]; @@ -680,7 +960,30 @@ If `totalLength` is provided, it is coerced to an unsigned integer. If the combined length of the `Buffer`s in `list` exceeds `totalLength`, the result is truncated to `totalLength`. -```js +```mjs +import { Buffer } from 'buffer'; + +// Create a single `Buffer` from a list of three `Buffer` instances. 
+ +const buf1 = Buffer.alloc(10); +const buf2 = Buffer.alloc(14); +const buf3 = Buffer.alloc(18); +const totalLength = buf1.length + buf2.length + buf3.length; + +console.log(totalLength); +// Prints: 42 + +const bufA = Buffer.concat([buf1, buf2, buf3], totalLength); + +console.log(bufA); +// Prints: +console.log(bufA.length); +// Prints: 42 +``` + +```cjs +const { Buffer } = require('buffer'); + // Create a single `Buffer` from a list of three `Buffer` instances. const buf1 = Buffer.alloc(10); @@ -712,15 +1015,24 @@ added: v5.10.0 Allocates a new `Buffer` using an `array` of bytes in the range `0` – `255`. Array entries outside that range will be truncated to fit into it. -```js +```mjs +import { Buffer } from 'buffer'; + // Creates a new Buffer containing the UTF-8 bytes of the string 'buffer'. const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]); ``` -A `TypeError` will be thrown if `array` is not an `Array` or another type -appropriate for `Buffer.from()` variants. +```cjs +const { Buffer } = require('buffer'); -`Buffer.from(array)` and [`Buffer.from(string)`][] may also use the internal +// Creates a new Buffer containing the UTF-8 bytes of the string 'buffer'. +const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]); +``` + +A `TypeError` will be thrown if `array` is not an `Array` or another type +appropriate for `Buffer.from()` variants. + +`Buffer.from(array)` and [`Buffer.from(string)`][] may also use the internal `Buffer` pool like [`Buffer.allocUnsafe()`][] does. ### Static method: `Buffer.from(arrayBuffer[, byteOffset[, length]])` @@ -740,7 +1052,30 @@ memory. For example, when passed a reference to the `.buffer` property of a [`TypedArray`][] instance, the newly created `Buffer` will share the same allocated memory as the [`TypedArray`][]'s underlying `ArrayBuffer`. -```js +```mjs +import { Buffer } from 'buffer'; + +const arr = new Uint16Array(2); + +arr[0] = 5000; +arr[1] = 4000; + +// Shares memory with `arr`. 
+const buf = Buffer.from(arr.buffer); + +console.log(buf); +// Prints: + +// Changing the original Uint16Array changes the Buffer also. +arr[1] = 6000; + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const arr = new Uint16Array(2); arr[0] = 5000; @@ -762,7 +1097,19 @@ console.log(buf); The optional `byteOffset` and `length` arguments specify a memory range within the `arrayBuffer` that will be shared by the `Buffer`. -```js +```mjs +import { Buffer } from 'buffer'; + +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); + +console.log(buf.length); +// Prints: 2 +``` + +```cjs +const { Buffer } = require('buffer'); + const ab = new ArrayBuffer(10); const buf = Buffer.from(ab, 0, 2); @@ -779,7 +1126,21 @@ of memory that extends beyond the bounds of a `TypedArray` view. A new `Buffer` created using the `buffer` property of a `TypedArray` may extend beyond the range of the `TypedArray`: -```js +```mjs +import { Buffer } from 'buffer'; + +const arrA = Uint8Array.from([0x63, 0x64, 0x65, 0x66]); // 4 elements +const arrB = new Uint8Array(arrA.buffer, 1, 2); // 2 elements +console.log(arrA.buffer === arrB.buffer); // true + +const buf = Buffer.from(arrB.buffer); +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const arrA = Uint8Array.from([0x63, 0x64, 0x65, 0x66]); // 4 elements const arrB = new Uint8Array(arrA.buffer, 1, 2); // 2 elements console.log(arrA.buffer === arrB.buffer); // true @@ -799,7 +1160,23 @@ added: v5.10.0 Copies the passed `buffer` data onto a new `Buffer` instance. 
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; + +console.log(buf1.toString()); +// Prints: auffer +console.log(buf2.toString()); +// Prints: buffer +``` + +```cjs +const { Buffer } = require('buffer'); + const buf1 = Buffer.from('buffer'); const buf2 = Buffer.from(buf1); @@ -826,7 +1203,16 @@ added: v8.2.0 For objects whose `valueOf()` function returns a value not strictly equal to `object`, returns `Buffer.from(object.valueOf(), offsetOrEncoding, length)`. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from(new String('this is a test')); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from(new String('this is a test')); // Prints: ``` @@ -834,7 +1220,22 @@ const buf = Buffer.from(new String('this is a test')); For objects that support `Symbol.toPrimitive`, returns `Buffer.from(object[Symbol.toPrimitive]('string'), offsetOrEncoding)`. -```js +```mjs +import { Buffer } from 'buffer'; + +class Foo { + [Symbol.toPrimitive]() { + return 'this is a test'; + } +} + +const buf = Buffer.from(new Foo(), 'utf8'); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + class Foo { [Symbol.toPrimitive]() { return 'this is a test'; @@ -859,7 +1260,23 @@ added: v5.10.0 Creates a new `Buffer` containing `string`. The `encoding` parameter identifies the character encoding to be used when converting `string` into bytes. 
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf1 = Buffer.from('this is a tést'); +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); + +console.log(buf1.toString()); +// Prints: this is a tést +console.log(buf2.toString()); +// Prints: this is a tést +console.log(buf1.toString('latin1')); +// Prints: this is a tést +``` + +```cjs +const { Buffer } = require('buffer'); + const buf1 = Buffer.from('this is a tést'); const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); @@ -884,7 +1301,19 @@ added: v0.1.101 Returns `true` if `obj` is a `Buffer`, `false` otherwise. -```js +```mjs +import { Buffer } from 'buffer'; + +Buffer.isBuffer(Buffer.alloc(10)); // true +Buffer.isBuffer(Buffer.from('foo')); // true +Buffer.isBuffer('a string'); // false +Buffer.isBuffer([]); // false +Buffer.isBuffer(new Uint8Array(1024)); // false +``` + +```cjs +const { Buffer } = require('buffer'); + Buffer.isBuffer(Buffer.alloc(10)); // true Buffer.isBuffer(Buffer.from('foo')); // true Buffer.isBuffer('a string'); // false @@ -903,7 +1332,25 @@ added: v0.9.1 Returns `true` if `encoding` is the name of a supported character encoding, or `false` otherwise. -```js +```mjs +import { Buffer } from 'buffer'; + +console.log(Buffer.isEncoding('utf8')); +// Prints: true + +console.log(Buffer.isEncoding('hex')); +// Prints: true + +console.log(Buffer.isEncoding('utf/8')); +// Prints: false + +console.log(Buffer.isEncoding('')); +// Prints: false +``` + +```cjs +const { Buffer } = require('buffer'); + console.log(Buffer.isEncoding('utf8')); // Prints: true @@ -941,7 +1388,27 @@ access is the same as `Uint8Array`. In other words, `buf[index]` returns `buf[index] = value` does not modify the buffer if `index` is negative or `>= buf.length`. -```js +```mjs +import { Buffer } from 'buffer'; + +// Copy an ASCII string into a `Buffer` one byte at a time. +// (This only works for ASCII-only strings. 
In general, one should use +// `Buffer.from()` to perform this conversion.) + +const str = 'Node.js'; +const buf = Buffer.allocUnsafe(str.length); + +for (let i = 0; i < str.length; i++) { + buf[i] = str.charCodeAt(i); +} + +console.log(buf.toString('utf8')); +// Prints: Node.js +``` + +```cjs +const { Buffer } = require('buffer'); + // Copy an ASCII string into a `Buffer` one byte at a time. // (This only works for ASCII-only strings. In general, one should use // `Buffer.from()` to perform this conversion.) @@ -965,7 +1432,19 @@ console.log(buf.toString('utf8')); This `ArrayBuffer` is not guaranteed to correspond exactly to the original `Buffer`. See the notes on `buf.byteOffset` for details. -```js +```mjs +import { Buffer } from 'buffer'; + +const arrayBuffer = new ArrayBuffer(16); +const buffer = Buffer.from(arrayBuffer); + +console.log(buffer.buffer === arrayBuffer); +// Prints: true +``` + +```cjs +const { Buffer } = require('buffer'); + const arrayBuffer = new ArrayBuffer(16); const buffer = Buffer.from(arrayBuffer); @@ -988,7 +1467,21 @@ to the `Buffer` object itself. A common issue when creating a `TypedArray` object that shares its memory with a `Buffer` is that in this case one needs to specify the `byteOffset` correctly: -```js +```mjs +import { Buffer } from 'buffer'; + +// Create a buffer smaller than `Buffer.poolSize`. +const nodeBuffer = new Buffer.from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]); + +// When casting the Node.js Buffer to an Int8Array, use the byteOffset +// to refer only to the part of `nodeBuffer.buffer` that contains the memory +// for `nodeBuffer`. +new Int8Array(nodeBuffer.buffer, nodeBuffer.byteOffset, nodeBuffer.length); +``` + +```cjs +const { Buffer } = require('buffer'); + // Create a buffer smaller than `Buffer.poolSize`. const nodeBuffer = new Buffer.from([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]); @@ -1030,7 +1523,31 @@ Comparison is based on the actual sequence of bytes in each `Buffer`. 
* `1` is returned if `target` should come *before* `buf` when sorted. * `-1` is returned if `target` should come *after* `buf` when sorted. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf1 = Buffer.from('ABC'); +const buf2 = Buffer.from('BCD'); +const buf3 = Buffer.from('ABCD'); + +console.log(buf1.compare(buf1)); +// Prints: 0 +console.log(buf1.compare(buf2)); +// Prints: -1 +console.log(buf1.compare(buf3)); +// Prints: -1 +console.log(buf2.compare(buf1)); +// Prints: 1 +console.log(buf2.compare(buf3)); +// Prints: 1 +console.log([buf1, buf2, buf3].sort(Buffer.compare)); +// Prints: [ , , ] +// (This result is equal to: [buf1, buf3, buf2].) +``` + +```cjs +const { Buffer } = require('buffer'); + const buf1 = Buffer.from('ABC'); const buf2 = Buffer.from('BCD'); const buf3 = Buffer.from('ABCD'); @@ -1054,7 +1571,23 @@ The optional `targetStart`, `targetEnd`, `sourceStart`, and `sourceEnd` arguments can be used to limit the comparison to specific ranges within `target` and `buf` respectively. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf1 = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8, 9]); +const buf2 = Buffer.from([5, 6, 7, 8, 9, 1, 2, 3, 4]); + +console.log(buf1.compare(buf2, 5, 9, 0, 4)); +// Prints: 0 +console.log(buf1.compare(buf2, 0, 6, 4)); +// Prints: -1 +console.log(buf1.compare(buf2, 5, 6, 5)); +// Prints: 1 +``` + +```cjs +const { Buffer } = require('buffer'); + const buf1 = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8, 9]); const buf2 = Buffer.from([5, 6, 7, 8, 9, 1, 2, 3, 4]); @@ -1090,7 +1623,9 @@ memory region overlaps with `buf`. for all TypedArrays, including Node.js `Buffer`s, although it takes different function arguments. -```js +```mjs +import { Buffer } from 'buffer'; + // Create two `Buffer` instances. const buf1 = Buffer.allocUnsafe(26); const buf2 = Buffer.allocUnsafe(26).fill('!'); @@ -1109,7 +1644,49 @@ console.log(buf2.toString('ascii', 0, 25)); // Prints: !!!!!!!!qrst!!!!!!!!!!!!! 
``` -```js +```cjs +const { Buffer } = require('buffer'); + +// Create two `Buffer` instances. +const buf1 = Buffer.allocUnsafe(26); +const buf2 = Buffer.allocUnsafe(26).fill('!'); + +for (let i = 0; i < 26; i++) { + // 97 is the decimal ASCII value for 'a'. + buf1[i] = i + 97; +} + +// Copy `buf1` bytes 16 through 19 into `buf2` starting at byte 8 of `buf2`. +buf1.copy(buf2, 8, 16, 20); +// This is equivalent to: +// buf2.set(buf1.subarray(16, 20), 8); + +console.log(buf2.toString('ascii', 0, 25)); +// Prints: !!!!!!!!qrst!!!!!!!!!!!!! +``` + +```mjs +import { Buffer } from 'buffer'; + +// Create a `Buffer` and copy data from one region to an overlapping region +// within the same `Buffer`. + +const buf = Buffer.allocUnsafe(26); + +for (let i = 0; i < 26; i++) { + // 97 is the decimal ASCII value for 'a'. + buf[i] = i + 97; +} + +buf.copy(buf, 0, 4, 10); + +console.log(buf.toString()); +// Prints: efghijghijklmnopqrstuvwxyz +``` + +```cjs +const { Buffer } = require('buffer'); + // Create a `Buffer` and copy data from one region to an overlapping region // within the same `Buffer`. @@ -1136,7 +1713,28 @@ added: v1.1.0 Creates and returns an [iterator][] of `[index, byte]` pairs from the contents of `buf`. -```js +```mjs +import { Buffer } from 'buffer'; + +// Log the entire contents of a `Buffer`. + +const buf = Buffer.from('buffer'); + +for (const pair of buf.entries()) { + console.log(pair); +} +// Prints: +// [0, 98] +// [1, 117] +// [2, 102] +// [3, 102] +// [4, 101] +// [5, 114] +``` + +```cjs +const { Buffer } = require('buffer'); + // Log the entire contents of a `Buffer`. const buf = Buffer.from('buffer'); @@ -1170,7 +1768,22 @@ Returns `true` if both `buf` and `otherBuffer` have exactly the same bytes, `false` otherwise. Equivalent to [`buf.compare(otherBuffer) === 0`][`buf.compare()`]. 
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf1 = Buffer.from('ABC'); +const buf2 = Buffer.from('414243', 'hex'); +const buf3 = Buffer.from('ABCD'); + +console.log(buf1.equals(buf2)); +// Prints: true +console.log(buf1.equals(buf3)); +// Prints: false +``` + +```cjs +const { Buffer } = require('buffer'); + const buf1 = Buffer.from('ABC'); const buf2 = Buffer.from('414243', 'hex'); const buf3 = Buffer.from('ABCD'); @@ -1216,7 +1829,20 @@ changes: Fills `buf` with the specified `value`. If the `offset` and `end` are not given, the entire `buf` will be filled: -```js +```mjs +import { Buffer } from 'buffer'; + +// Fill a `Buffer` with the ASCII character 'h'. + +const b = Buffer.allocUnsafe(50).fill('h'); + +console.log(b.toString()); +// Prints: hhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhhh +``` + +```cjs +const { Buffer } = require('buffer'); + // Fill a `Buffer` with the ASCII character 'h'. const b = Buffer.allocUnsafe(50).fill('h'); @@ -1232,17 +1858,43 @@ filled with `value & 255`. If the final write of a `fill()` operation falls on a multi-byte character, then only the bytes of that character that fit into `buf` are written: -```js +```mjs +import { Buffer } from 'buffer'; + // Fill a `Buffer` with character that takes up two bytes in UTF-8. console.log(Buffer.allocUnsafe(5).fill('\u0222')); // Prints: ``` -If `value` contains invalid characters, it is truncated; if no valid -fill data remains, an exception is thrown: +```cjs +const { Buffer } = require('buffer'); + +// Fill a `Buffer` with character that takes up two bytes in UTF-8. 
+ +console.log(Buffer.allocUnsafe(5).fill('\u0222')); +// Prints: +``` + +If `value` contains invalid characters, it is truncated; if no valid +fill data remains, an exception is thrown: + +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(5); + +console.log(buf.fill('a')); +// Prints: +console.log(buf.fill('aazz', 'hex')); +// Prints: +console.log(buf.fill('zz', 'hex')); +// Throws an exception. +``` + +```cjs +const { Buffer } = require('buffer'); -```js const buf = Buffer.allocUnsafe(5); console.log(buf.fill('a')); @@ -1267,7 +1919,30 @@ added: v5.3.0 Equivalent to [`buf.indexOf() !== -1`][`buf.indexOf()`]. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from('this is a buffer'); + +console.log(buf.includes('this')); +// Prints: true +console.log(buf.includes('is')); +// Prints: true +console.log(buf.includes(Buffer.from('a buffer'))); +// Prints: true +console.log(buf.includes(97)); +// Prints: true (97 is the decimal ASCII value for 'a') +console.log(buf.includes(Buffer.from('a buffer example'))); +// Prints: false +console.log(buf.includes(Buffer.from('a buffer example').slice(0, 8))); +// Prints: true +console.log(buf.includes('this', 4)); +// Prints: false +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from('this is a buffer'); console.log(buf.includes('this')); @@ -1319,7 +1994,35 @@ If `value` is: * a number, `value` will be interpreted as an unsigned 8-bit integer value between `0` and `255`. 
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from('this is a buffer'); + +console.log(buf.indexOf('this')); +// Prints: 0 +console.log(buf.indexOf('is')); +// Prints: 2 +console.log(buf.indexOf(Buffer.from('a buffer'))); +// Prints: 8 +console.log(buf.indexOf(97)); +// Prints: 8 (97 is the decimal ASCII value for 'a') +console.log(buf.indexOf(Buffer.from('a buffer example'))); +// Prints: -1 +console.log(buf.indexOf(Buffer.from('a buffer example').slice(0, 8))); +// Prints: 8 + +const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le'); + +console.log(utf16Buffer.indexOf('\u03a3', 0, 'utf16le')); +// Prints: 4 +console.log(utf16Buffer.indexOf('\u03a3', -4, 'utf16le')); +// Prints: 6 +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from('this is a buffer'); console.log(buf.indexOf('this')); @@ -1351,7 +2054,27 @@ If `byteOffset` is not a number, it will be coerced to a number. If the result of coercion is `NaN` or `0`, then the entire buffer will be searched. This behavior matches [`String.prototype.indexOf()`][]. -```js +```mjs +import { Buffer } from 'buffer'; + +const b = Buffer.from('abcdef'); + +// Passing a value that's a number, but not a valid byte. +// Prints: 2, equivalent to searching for 99 or 'c'. +console.log(b.indexOf(99.9)); +console.log(b.indexOf(256 + 99)); + +// Passing a byteOffset that coerces to NaN or 0. +// Prints: 1, searching the whole buffer. +console.log(b.indexOf('b', undefined)); +console.log(b.indexOf('b', {})); +console.log(b.indexOf('b', null)); +console.log(b.indexOf('b', [])); +``` + +```cjs +const { Buffer } = require('buffer'); + const b = Buffer.from('abcdef'); // Passing a value that's a number, but not a valid byte. @@ -1380,7 +2103,26 @@ added: v1.1.0 Creates and returns an [iterator][] of `buf` keys (indices). 
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from('buffer'); + +for (const key of buf.keys()) { + console.log(key); +} +// Prints: +// 0 +// 1 +// 2 +// 3 +// 4 +// 5 +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from('buffer'); for (const key of buf.keys()) { @@ -1417,7 +2159,37 @@ changes: Identical to [`buf.indexOf()`][], except the last occurrence of `value` is found rather than the first occurrence. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from('this buffer is a buffer'); + +console.log(buf.lastIndexOf('this')); +// Prints: 0 +console.log(buf.lastIndexOf('buffer')); +// Prints: 17 +console.log(buf.lastIndexOf(Buffer.from('buffer'))); +// Prints: 17 +console.log(buf.lastIndexOf(97)); +// Prints: 15 (97 is the decimal ASCII value for 'a') +console.log(buf.lastIndexOf(Buffer.from('yolo'))); +// Prints: -1 +console.log(buf.lastIndexOf('buffer', 5)); +// Prints: 5 +console.log(buf.lastIndexOf('buffer', 4)); +// Prints: -1 + +const utf16Buffer = Buffer.from('\u039a\u0391\u03a3\u03a3\u0395', 'utf16le'); + +console.log(utf16Buffer.lastIndexOf('\u03a3', undefined, 'utf16le')); +// Prints: 6 +console.log(utf16Buffer.lastIndexOf('\u03a3', -5, 'utf16le')); +// Prints: 4 +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from('this buffer is a buffer'); console.log(buf.lastIndexOf('this')); @@ -1451,7 +2223,30 @@ If `byteOffset` is not a number, it will be coerced to a number. Any arguments that coerce to `NaN`, like `{}` or `undefined`, will search the whole buffer. This behavior matches [`String.prototype.lastIndexOf()`][]. -```js +```mjs +import { Buffer } from 'buffer'; + +const b = Buffer.from('abcdef'); + +// Passing a value that's a number, but not a valid byte. +// Prints: 2, equivalent to searching for 99 or 'c'. +console.log(b.lastIndexOf(99.9)); +console.log(b.lastIndexOf(256 + 99)); + +// Passing a byteOffset that coerces to NaN. 
+// Prints: 1, searching the whole buffer. +console.log(b.lastIndexOf('b', undefined)); +console.log(b.lastIndexOf('b', {})); + +// Passing a byteOffset that coerces to 0. +// Prints: -1, equivalent to passing 0. +console.log(b.lastIndexOf('b', null)); +console.log(b.lastIndexOf('b', [])); +``` + +```cjs +const { Buffer } = require('buffer'); + const b = Buffer.from('abcdef'); // Passing a value that's a number, but not a valid byte. @@ -1481,7 +2276,25 @@ added: v0.1.90 Returns the number of bytes in `buf`. -```js +```mjs +import { Buffer } from 'buffer'; + +// Create a `Buffer` and write a shorter string to it using UTF-8. + +const buf = Buffer.alloc(1234); + +console.log(buf.length); +// Prints: 1234 + +buf.write('some string', 0, 'utf8'); + +console.log(buf.length); +// Prints: 1234 +``` + +```cjs +const { Buffer } = require('buffer'); + // Create a `Buffer` and write a shorter string to it using UTF-8. const buf = Buffer.alloc(1234); @@ -1559,7 +2372,18 @@ Reads an unsigned, big-endian 64-bit integer from `buf` at the specified This function is also available under the `readBigUint64BE` alias. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]); + +console.log(buf.readBigUInt64BE(0)); +// Prints: 4294967295n +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]); console.log(buf.readBigUInt64BE(0)); @@ -1588,7 +2412,18 @@ Reads an unsigned, little-endian 64-bit integer from `buf` at the specified This function is also available under the `readBigUint64LE` alias. 
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]); + +console.log(buf.readBigUInt64LE(0)); +// Prints: 18446744069414584320n +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0xff, 0xff]); console.log(buf.readBigUInt64LE(0)); @@ -1611,7 +2446,18 @@ changes: Reads a 64-bit, big-endian double from `buf` at the specified `offset`. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]); + +console.log(buf.readDoubleBE(0)); +// Prints: 8.20788039913184e-304 +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]); console.log(buf.readDoubleBE(0)); @@ -1634,7 +2480,20 @@ changes: Reads a 64-bit, little-endian double from `buf` at the specified `offset`. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]); + +console.log(buf.readDoubleLE(0)); +// Prints: 5.447603722011605e-270 +console.log(buf.readDoubleLE(1)); +// Throws ERR_OUT_OF_RANGE. +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]); console.log(buf.readDoubleLE(0)); @@ -1659,7 +2518,18 @@ changes: Reads a 32-bit, big-endian float from `buf` at the specified `offset`. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([1, 2, 3, 4]); + +console.log(buf.readFloatBE(0)); +// Prints: 2.387939260590663e-38 +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([1, 2, 3, 4]); console.log(buf.readFloatBE(0)); @@ -1682,7 +2552,20 @@ changes: Reads a 32-bit, little-endian float from `buf` at the specified `offset`. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([1, 2, 3, 4]); + +console.log(buf.readFloatLE(0)); +// Prints: 1.539989614439558e-36 +console.log(buf.readFloatLE(1)); +// Throws ERR_OUT_OF_RANGE. 
+``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([1, 2, 3, 4]); console.log(buf.readFloatLE(0)); @@ -1709,7 +2592,22 @@ Reads a signed 8-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted as two's complement signed values. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([-1, 5]); + +console.log(buf.readInt8(0)); +// Prints: -1 +console.log(buf.readInt8(1)); +// Prints: 5 +console.log(buf.readInt8(2)); +// Throws ERR_OUT_OF_RANGE. +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([-1, 5]); console.log(buf.readInt8(0)); @@ -1738,7 +2636,18 @@ Reads a signed, big-endian 16-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted as two's complement signed values. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([0, 5]); + +console.log(buf.readInt16BE(0)); +// Prints: 5 +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([0, 5]); console.log(buf.readInt16BE(0)); @@ -1764,7 +2673,20 @@ Reads a signed, little-endian 16-bit integer from `buf` at the specified Integers read from a `Buffer` are interpreted as two's complement signed values. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([0, 5]); + +console.log(buf.readInt16LE(0)); +// Prints: 1280 +console.log(buf.readInt16LE(1)); +// Throws ERR_OUT_OF_RANGE. +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([0, 5]); console.log(buf.readInt16LE(0)); @@ -1791,7 +2713,18 @@ Reads a signed, big-endian 32-bit integer from `buf` at the specified `offset`. Integers read from a `Buffer` are interpreted as two's complement signed values. 
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([0, 0, 0, 5]); + +console.log(buf.readInt32BE(0)); +// Prints: 5 +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([0, 0, 0, 5]); console.log(buf.readInt32BE(0)); @@ -1817,7 +2750,20 @@ Reads a signed, little-endian 32-bit integer from `buf` at the specified Integers read from a `Buffer` are interpreted as two's complement signed values. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([0, 0, 0, 5]); + +console.log(buf.readInt32LE(0)); +// Prints: 83886080 +console.log(buf.readInt32LE(1)); +// Throws ERR_OUT_OF_RANGE. +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([0, 0, 0, 5]); console.log(buf.readInt32LE(0)); @@ -1846,7 +2792,22 @@ Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a big-endian, two's complement signed value supporting up to 48 bits of accuracy. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + +console.log(buf.readIntBE(0, 6).toString(16)); +// Prints: 1234567890ab +console.log(buf.readIntBE(1, 6).toString(16)); +// Throws ERR_OUT_OF_RANGE. +console.log(buf.readIntBE(1, 0).toString(16)); +// Throws ERR_OUT_OF_RANGE. +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); console.log(buf.readIntBE(0, 6).toString(16)); @@ -1877,7 +2838,18 @@ Reads `byteLength` number of bytes from `buf` at the specified `offset` and interprets the result as a little-endian, two's complement signed value supporting up to 48 bits of accuracy. 
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + +console.log(buf.readIntLE(0, 6).toString(16)); +// Prints: -546f87a9cbee +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); console.log(buf.readIntLE(0, 6).toString(16)); @@ -1907,7 +2879,22 @@ Reads an unsigned 8-bit integer from `buf` at the specified `offset`. This function is also available under the `readUint8` alias. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([1, -2]); + +console.log(buf.readUInt8(0)); +// Prints: 1 +console.log(buf.readUInt8(1)); +// Prints: 254 +console.log(buf.readUInt8(2)); +// Throws ERR_OUT_OF_RANGE. +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([1, -2]); console.log(buf.readUInt8(0)); @@ -1942,7 +2929,20 @@ Reads an unsigned, big-endian 16-bit integer from `buf` at the specified This function is also available under the `readUint16BE` alias. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([0x12, 0x34, 0x56]); + +console.log(buf.readUInt16BE(0).toString(16)); +// Prints: 1234 +console.log(buf.readUInt16BE(1).toString(16)); +// Prints: 3456 +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([0x12, 0x34, 0x56]); console.log(buf.readUInt16BE(0).toString(16)); @@ -1975,7 +2975,22 @@ Reads an unsigned, little-endian 16-bit integer from `buf` at the specified This function is also available under the `readUint16LE` alias. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([0x12, 0x34, 0x56]); + +console.log(buf.readUInt16LE(0).toString(16)); +// Prints: 3412 +console.log(buf.readUInt16LE(1).toString(16)); +// Prints: 5634 +console.log(buf.readUInt16LE(2).toString(16)); +// Throws ERR_OUT_OF_RANGE. 
+``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([0x12, 0x34, 0x56]); console.log(buf.readUInt16LE(0).toString(16)); @@ -2010,7 +3025,18 @@ Reads an unsigned, big-endian 32-bit integer from `buf` at the specified This function is also available under the `readUint32BE` alias. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]); + +console.log(buf.readUInt32BE(0).toString(16)); +// Prints: 12345678 +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]); console.log(buf.readUInt32BE(0).toString(16)); @@ -2041,7 +3067,20 @@ Reads an unsigned, little-endian 32-bit integer from `buf` at the specified This function is also available under the `readUint32LE` alias. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]); + +console.log(buf.readUInt32LE(0).toString(16)); +// Prints: 78563412 +console.log(buf.readUInt32LE(1).toString(16)); +// Throws ERR_OUT_OF_RANGE. +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([0x12, 0x34, 0x56, 0x78]); console.log(buf.readUInt32LE(0).toString(16)); @@ -2077,7 +3116,20 @@ up to 48 bits of accuracy. This function is also available under the `readUintBE` alias. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + +console.log(buf.readUIntBE(0, 6).toString(16)); +// Prints: 1234567890ab +console.log(buf.readUIntBE(1, 6).toString(16)); +// Throws ERR_OUT_OF_RANGE. +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); console.log(buf.readUIntBE(0, 6).toString(16)); @@ -2113,7 +3165,18 @@ up to 48 bits of accuracy. This function is also available under the `readUintLE` alias. 
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); + +console.log(buf.readUIntLE(0, 6).toString(16)); +// Prints: ab9078563412 +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([0x12, 0x34, 0x56, 0x78, 0x90, 0xab]); console.log(buf.readUIntLE(0, 6).toString(16)); @@ -2141,7 +3204,33 @@ This method is inherited from [`TypedArray.prototype.subarray()`][]. Modifying the new `Buffer` slice will modify the memory in the original `Buffer` because the allocated memory of the two objects overlap. -```js +```mjs +import { Buffer } from 'buffer'; + +// Create a `Buffer` with the ASCII alphabet, take a slice, and modify one byte +// from the original `Buffer`. + +const buf1 = Buffer.allocUnsafe(26); + +for (let i = 0; i < 26; i++) { + // 97 is the decimal ASCII value for 'a'. + buf1[i] = i + 97; +} + +const buf2 = buf1.subarray(0, 3); + +console.log(buf2.toString('ascii', 0, buf2.length)); +// Prints: abc + +buf1[0] = 33; + +console.log(buf2.toString('ascii', 0, buf2.length)); +// Prints: !bc +``` + +```cjs +const { Buffer } = require('buffer'); + // Create a `Buffer` with the ASCII alphabet, take a slice, and modify one byte // from the original `Buffer`. @@ -2166,7 +3255,27 @@ console.log(buf2.toString('ascii', 0, buf2.length)); Specifying negative indexes causes the slice to be generated relative to the end of `buf` rather than the beginning. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from('buffer'); + +console.log(buf.subarray(-6, -1).toString()); +// Prints: buffe +// (Equivalent to buf.subarray(0, 5).) + +console.log(buf.subarray(-6, -2).toString()); +// Prints: buff +// (Equivalent to buf.subarray(0, 4).) + +console.log(buf.subarray(-5, -2).toString()); +// Prints: uff +// (Equivalent to buf.subarray(1, 4).) 
+``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from('buffer'); console.log(buf.subarray(-6, -1).toString()); @@ -2212,7 +3321,23 @@ This method is not compatible with the `Uint8Array.prototype.slice()`, which is a superclass of `Buffer`. To copy the slice, use `Uint8Array.prototype.slice()`. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from('buffer'); + +const copiedBuf = Uint8Array.prototype.slice.call(buf); +copiedBuf[0]++; +console.log(copiedBuf.toString()); +// Prints: cuffer + +console.log(buf.toString()); +// Prints: buffer +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from('buffer'); const copiedBuf = Uint8Array.prototype.slice.call(buf); @@ -2235,7 +3360,28 @@ Interprets `buf` as an array of unsigned 16-bit integers and swaps the byte order *in-place*. Throws [`ERR_INVALID_BUFFER_SIZE`][] if [`buf.length`][] is not a multiple of 2. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + +console.log(buf1); +// Prints: + +buf1.swap16(); + +console.log(buf1); +// Prints: + +const buf2 = Buffer.from([0x1, 0x2, 0x3]); + +buf2.swap16(); +// Throws ERR_INVALID_BUFFER_SIZE. +``` + +```cjs +const { Buffer } = require('buffer'); + const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); console.log(buf1); @@ -2255,7 +3401,16 @@ buf2.swap16(); One convenient use of `buf.swap16()` is to perform a fast in-place conversion between UTF-16 little-endian and UTF-16 big-endian: -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from('This is little-endian UTF-16', 'utf16le'); +buf.swap16(); // Convert to big-endian UTF-16 text. +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from('This is little-endian UTF-16', 'utf16le'); buf.swap16(); // Convert to big-endian UTF-16 text. 
``` @@ -2271,7 +3426,28 @@ Interprets `buf` as an array of unsigned 32-bit integers and swaps the byte order *in-place*. Throws [`ERR_INVALID_BUFFER_SIZE`][] if [`buf.length`][] is not a multiple of 4. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + +console.log(buf1); +// Prints: + +buf1.swap32(); + +console.log(buf1); +// Prints: + +const buf2 = Buffer.from([0x1, 0x2, 0x3]); + +buf2.swap32(); +// Throws ERR_INVALID_BUFFER_SIZE. +``` + +```cjs +const { Buffer } = require('buffer'); + const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); console.log(buf1); @@ -2298,7 +3474,28 @@ added: v6.3.0 Interprets `buf` as an array of 64-bit numbers and swaps byte order *in-place*. Throws [`ERR_INVALID_BUFFER_SIZE`][] if [`buf.length`][] is not a multiple of 8. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); + +console.log(buf1); +// Prints: + +buf1.swap64(); + +console.log(buf1); +// Prints: + +const buf2 = Buffer.from([0x1, 0x2, 0x3]); + +buf2.swap64(); +// Throws ERR_INVALID_BUFFER_SIZE. +``` + +```cjs +const { Buffer } = require('buffer'); + const buf1 = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8]); console.log(buf1); @@ -2328,7 +3525,28 @@ this function when stringifying a `Buffer` instance. `Buffer.from()` accepts objects in the format returned from this method. In particular, `Buffer.from(buf.toJSON())` works like `Buffer.from(buf)`. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5]); +const json = JSON.stringify(buf); + +console.log(json); +// Prints: {"type":"Buffer","data":[1,2,3,4,5]} + +const copy = JSON.parse(json, (key, value) => { + return value && value.type === 'Buffer' ? 
+ Buffer.from(value) : + value; +}); + +console.log(copy); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.from([0x1, 0x2, 0x3, 0x4, 0x5]); const json = JSON.stringify(buf); @@ -2365,7 +3583,34 @@ then each invalid byte is replaced with the replacement character `U+FFFD`. The maximum length of a string instance (in UTF-16 code units) is available as [`buffer.constants.MAX_STRING_LENGTH`][]. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf1 = Buffer.allocUnsafe(26); + +for (let i = 0; i < 26; i++) { + // 97 is the decimal ASCII value for 'a'. + buf1[i] = i + 97; +} + +console.log(buf1.toString('utf8')); +// Prints: abcdefghijklmnopqrstuvwxyz +console.log(buf1.toString('utf8', 0, 5)); +// Prints: abcde + +const buf2 = Buffer.from('tést'); + +console.log(buf2.toString('hex')); +// Prints: 74c3a97374 +console.log(buf2.toString('utf8', 0, 3)); +// Prints: té +console.log(buf2.toString(undefined, 0, 3)); +// Prints: té +``` + +```cjs +const { Buffer } = require('buffer'); + const buf1 = Buffer.allocUnsafe(26); for (let i = 0; i < 26; i++) { @@ -2395,10 +3640,40 @@ added: v1.1.0 * Returns: {Iterator} -Creates and returns an [iterator][] for `buf` values (bytes). This function is -called automatically when a `Buffer` is used in a `for..of` statement. +Creates and returns an [iterator][] for `buf` values (bytes). This function is +called automatically when a `Buffer` is used in a `for..of` statement. 
+ +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.from('buffer'); + +for (const value of buf.values()) { + console.log(value); +} +// Prints: +// 98 +// 117 +// 102 +// 102 +// 101 +// 114 + +for (const value of buf) { + console.log(value); +} +// Prints: +// 98 +// 117 +// 102 +// 102 +// 101 +// 114 +``` + +```cjs +const { Buffer } = require('buffer'); -```js const buf = Buffer.from('buffer'); for (const value of buf.values()) { @@ -2442,7 +3717,27 @@ Writes `string` to `buf` at `offset` according to the character encoding in not contain enough space to fit the entire string, only part of `string` will be written. However, partially encoded characters will not be written. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.alloc(256); + +const len = buf.write('\u00bd + \u00bc = \u00be', 0); + +console.log(`${len} bytes: ${buf.toString('utf8', 0, len)}`); +// Prints: 12 bytes: ½ + ¼ = ¾ + +const buffer = Buffer.alloc(10); + +const length = buffer.write('abcd', 8); + +console.log(`${length} bytes: ${buffer.toString('utf8', 8, 10)}`); +// Prints: 2 bytes : ab +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.alloc(256); const len = buf.write('\u00bd + \u00bc = \u00be', 0); @@ -2474,7 +3769,20 @@ Writes `value` to `buf` at the specified `offset` as big-endian. `value` is interpreted and written as a two's complement signed integer. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(8); + +buf.writeBigInt64BE(0x0102030405060708n, 0); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(8); buf.writeBigInt64BE(0x0102030405060708n, 0); @@ -2499,7 +3807,20 @@ Writes `value` to `buf` at the specified `offset` as little-endian. `value` is interpreted and written as a two's complement signed integer. 
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(8); + +buf.writeBigInt64LE(0x0102030405060708n, 0); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(8); buf.writeBigInt64LE(0x0102030405060708n, 0); @@ -2530,7 +3851,20 @@ Writes `value` to `buf` at the specified `offset` as big-endian. This function is also available under the `writeBigUint64BE` alias. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(8); + +buf.writeBigUInt64BE(0xdecafafecacefaden, 0); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(8); buf.writeBigUInt64BE(0xdecafafecacefaden, 0); @@ -2559,7 +3893,20 @@ changes: Writes `value` to `buf` at the specified `offset` as little-endian -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(8); + +buf.writeBigUInt64LE(0xdecafafecacefaden, 0); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(8); buf.writeBigUInt64LE(0xdecafafecacefaden, 0); @@ -2589,7 +3936,20 @@ Writes `value` to `buf` at the specified `offset` as big-endian. The `value` must be a JavaScript number. Behavior is undefined when `value` is anything other than a JavaScript number. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(8); + +buf.writeDoubleBE(123.456, 0); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(8); buf.writeDoubleBE(123.456, 0); @@ -2617,7 +3977,20 @@ Writes `value` to `buf` at the specified `offset` as little-endian. The `value` must be a JavaScript number. Behavior is undefined when `value` is anything other than a JavaScript number. 
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(8); + +buf.writeDoubleLE(123.456, 0); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(8); buf.writeDoubleLE(123.456, 0); @@ -2644,7 +4017,20 @@ changes: Writes `value` to `buf` at the specified `offset` as big-endian. Behavior is undefined when `value` is anything other than a JavaScript number. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(4); + +buf.writeFloatBE(0xcafebabe, 0); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(4); buf.writeFloatBE(0xcafebabe, 0); @@ -2671,7 +4057,20 @@ changes: Writes `value` to `buf` at the specified `offset` as little-endian. Behavior is undefined when `value` is anything other than a JavaScript number. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(4); + +buf.writeFloatLE(0xcafebabe, 0); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(4); buf.writeFloatLE(0xcafebabe, 0); @@ -2701,7 +4100,21 @@ a signed 8-bit integer. `value` is interpreted and written as a two's complement signed integer. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(2); + +buf.writeInt8(2, 0); +buf.writeInt8(-2, 1); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(2); buf.writeInt8(2, 0); @@ -2732,7 +4145,20 @@ anything other than a signed 16-bit integer. The `value` is interpreted and written as a two's complement signed integer. 
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(2); + +buf.writeInt16BE(0x0102, 0); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(2); buf.writeInt16BE(0x0102, 0); @@ -2762,7 +4188,20 @@ anything other than a signed 16-bit integer. The `value` is interpreted and written as a two's complement signed integer. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(2); + +buf.writeInt16LE(0x0304, 0); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(2); buf.writeInt16LE(0x0304, 0); @@ -2792,7 +4231,20 @@ anything other than a signed 32-bit integer. The `value` is interpreted and written as a two's complement signed integer. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(4); + +buf.writeInt32BE(0x01020304, 0); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(4); buf.writeInt32BE(0x01020304, 0); @@ -2822,7 +4274,20 @@ anything other than a signed 32-bit integer. The `value` is interpreted and written as a two's complement signed integer. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(4); + +buf.writeInt32LE(0x05060708, 0); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(4); buf.writeInt32LE(0x05060708, 0); @@ -2852,7 +4317,20 @@ Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as big-endian. Supports up to 48 bits of accuracy. Behavior is undefined when `value` is anything other than a signed integer. 
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(6); + +buf.writeIntBE(0x1234567890ab, 0, 6); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(6); buf.writeIntBE(0x1234567890ab, 0, 6); @@ -2882,7 +4360,20 @@ Writes `byteLength` bytes of `value` to `buf` at the specified `offset` as little-endian. Supports up to 48 bits of accuracy. Behavior is undefined when `value` is anything other than a signed integer. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(6); + +buf.writeIntLE(0x1234567890ab, 0, 6); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(6); buf.writeIntLE(0x1234567890ab, 0, 6); @@ -2917,7 +4408,23 @@ other than an unsigned 8-bit integer. This function is also available under the `writeUint8` alias. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(4); + +buf.writeUInt8(0x3, 0); +buf.writeUInt8(0x4, 1); +buf.writeUInt8(0x23, 2); +buf.writeUInt8(0x42, 3); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(4); buf.writeUInt8(0x3, 0); @@ -2955,7 +4462,21 @@ is anything other than an unsigned 16-bit integer. This function is also available under the `writeUint16BE` alias. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(4); + +buf.writeUInt16BE(0xdead, 0); +buf.writeUInt16BE(0xbeef, 2); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(4); buf.writeUInt16BE(0xdead, 0); @@ -2991,7 +4512,21 @@ anything other than an unsigned 16-bit integer. This function is also available under the `writeUint16LE` alias. 
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(4); + +buf.writeUInt16LE(0xdead, 0); +buf.writeUInt16LE(0xbeef, 2); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(4); buf.writeUInt16LE(0xdead, 0); @@ -3027,7 +4562,20 @@ is anything other than an unsigned 32-bit integer. This function is also available under the `writeUint32BE` alias. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(4); + +buf.writeUInt32BE(0xfeedface, 0); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(4); buf.writeUInt32BE(0xfeedface, 0); @@ -3062,7 +4610,20 @@ anything other than an unsigned 32-bit integer. This function is also available under the `writeUint32LE` alias. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(4); + +buf.writeUInt32LE(0xfeedface, 0); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(4); buf.writeUInt32LE(0xfeedface, 0); @@ -3099,7 +4660,20 @@ when `value` is anything other than an unsigned integer. This function is also available under the `writeUintBE` alias. -```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(6); + +buf.writeUIntBE(0x1234567890ab, 0, 6); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(6); buf.writeUIntBE(0x1234567890ab, 0, 6); @@ -3136,7 +4710,20 @@ when `value` is anything other than an unsigned integer. This function is also available under the `writeUintLE` alias. 
-```js +```mjs +import { Buffer } from 'buffer'; + +const buf = Buffer.allocUnsafe(6); + +buf.writeUIntLE(0x1234567890ab, 0, 6); + +console.log(buf); +// Prints: +``` + +```cjs +const { Buffer } = require('buffer'); + const buf = Buffer.allocUnsafe(6); buf.writeUIntLE(0x1234567890ab, 0, 6); @@ -3378,10 +4965,18 @@ Encodings supported by `buffer.transcode()` are: `'ascii'`, `'utf8'`, The transcoding process will use substitution characters if a given byte sequence cannot be adequately represented in the target encoding. For instance: -```js -const buffer = require('buffer'); +```mjs +import { Buffer, transcode } from 'buffer'; + +const newBuf = transcode(Buffer.from('€'), 'utf8', 'ascii'); +console.log(newBuf.toString('ascii')); +// Prints: '?' +``` + +```cjs +const { Buffer, transcode } = require('buffer'); -const newBuf = buffer.transcode(Buffer.from('€'), 'utf8', 'ascii'); +const newBuf = transcode(Buffer.from('€'), 'utf8', 'ascii'); console.log(newBuf.toString('ascii')); // Prints: '?' ``` diff --git a/doc/api/cluster.md b/doc/api/cluster.md index d12c1f12d256d8..2b515ab82e3b6b 100644 --- a/doc/api/cluster.md +++ b/doc/api/cluster.md @@ -13,10 +13,42 @@ processes to handle the load. The cluster module allows easy creation of child processes that all share server ports. -```js +```mjs +import cluster from 'cluster'; +import http from 'http'; +import { cpus } from 'os'; +import process from 'process'; + +const numCPUs = cpus().length; + +if (cluster.isPrimary) { + console.log(`Primary ${process.pid} is running`); + + // Fork workers. 
+ for (let i = 0; i < numCPUs; i++) { + cluster.fork(); + } + + cluster.on('exit', (worker, code, signal) => { + console.log(`worker ${worker.process.pid} died`); + }); +} else { + // Workers can share any TCP connection + // In this case it is an HTTP server + http.createServer((req, res) => { + res.writeHead(200); + res.end('hello world\n'); + }).listen(8000); + + console.log(`Worker ${process.pid} started`); +} +``` + +```cjs const cluster = require('cluster'); const http = require('http'); const numCPUs = require('os').cpus().length; +const process = require('process'); if (cluster.isPrimary) { console.log(`Primary ${process.pid} is running`); @@ -157,7 +189,24 @@ added: v0.11.2 Similar to the `cluster.on('exit')` event, but specific to this worker. -```js +```mjs +import cluster from 'cluster'; + +const worker = cluster.fork(); +worker.on('exit', (code, signal) => { + if (signal) { + console.log(`worker was killed by signal: ${signal}`); + } else if (code !== 0) { + console.log(`worker exited with error code: ${code}`); + } else { + console.log('worker success!'); + } +}); +``` + +```cjs +const cluster = require('cluster'); + const worker = cluster.fork(); worker.on('exit', (code, signal) => { if (signal) { @@ -179,7 +228,17 @@ added: v0.7.0 Similar to the `cluster.on('listening')` event, but specific to this worker. -```js +```mjs +import cluster from 'cluster'; + +cluster.fork().on('listening', (address) => { + // Worker is listening +}); +``` + +```cjs +const cluster = require('cluster'); + cluster.fork().on('listening', (address) => { // Worker is listening }); @@ -204,9 +263,54 @@ See [`process` event: `'message'`][]. Here is an example using the message system. 
It keeps a count in the primary process of the number of HTTP requests received by the workers: -```js +```mjs +import cluster from 'cluster'; +import http from 'http'; +import { cpus } from 'os'; +import process from 'process'; + +if (cluster.isPrimary) { + + // Keep track of http requests + let numReqs = 0; + setInterval(() => { + console.log(`numReqs = ${numReqs}`); + }, 1000); + + // Count requests + function messageHandler(msg) { + if (msg.cmd && msg.cmd === 'notifyRequest') { + numReqs += 1; + } + } + + // Start workers and listen for messages containing notifyRequest + const numCPUs = cpus().length; + for (let i = 0; i < numCPUs; i++) { + cluster.fork(); + } + + for (const id in cluster.workers) { + cluster.workers[id].on('message', messageHandler); + } + +} else { + + // Worker processes have a http server. + http.Server((req, res) => { + res.writeHead(200); + res.end('hello world\n'); + + // Notify primary about the request + process.send({ cmd: 'notifyRequest' }); + }).listen(8000); +} +``` + +```cjs const cluster = require('cluster'); const http = require('http'); +const process = require('process'); if (cluster.isPrimary) { @@ -387,10 +491,44 @@ added: v0.11.14 This function returns `true` if the worker's process has terminated (either because of exiting or being signaled). Otherwise, it returns `false`. -```js +```mjs +import cluster from 'cluster'; +import http from 'http'; +import { cpus } from 'os'; +import process from 'process'; + +const numCPUs = cpus().length; + +if (cluster.isPrimary) { + console.log(`Primary ${process.pid} is running`); + + // Fork workers. + for (let i = 0; i < numCPUs; i++) { + cluster.fork(); + } + + cluster.on('fork', (worker) => { + console.log('worker is dead:', worker.isDead()); + }); + + cluster.on('exit', (worker, code, signal) => { + console.log('worker is dead:', worker.isDead()); + }); +} else { + // Workers can share any TCP connection. In this case, it is an HTTP server. 
+ http.createServer((req, res) => { + res.writeHead(200); + res.end(`Current process\n ${process.pid}`); + process.kill(process.pid); + }).listen(8000); +} +``` + +```cjs const cluster = require('cluster'); const http = require('http'); const numCPUs = require('os').cpus().length; +const process = require('process'); if (cluster.isPrimary) { console.log(`Primary ${process.pid} is running`); @@ -817,8 +955,25 @@ the `env` passed to [`.fork()`][]. The defaults above apply to the first call only; the defaults for later calls are the current values at the time of `cluster.setupPrimary()` is called. -```js +```mjs +import cluster from 'cluster'; + +cluster.setupPrimary({ + exec: 'worker.js', + args: ['--use', 'https'], + silent: true +}); +cluster.fork(); // https worker +cluster.setupPrimary({ + exec: 'worker.js', + args: ['--use', 'http'] +}); +cluster.fork(); // http worker +``` + +```cjs const cluster = require('cluster'); + cluster.setupPrimary({ exec: 'worker.js', args: ['--use', 'https'], @@ -843,7 +998,19 @@ added: v0.7.0 A reference to the current worker object. Not available in the primary process. -```js +```mjs +import cluster from 'cluster'; + +if (cluster.isPrimary) { + console.log('I am primary'); + cluster.fork(); + cluster.fork(); +} else if (cluster.isWorker) { + console.log(`I am worker #${cluster.worker.id}`); +} +``` + +```cjs const cluster = require('cluster'); if (cluster.isPrimary) { @@ -871,7 +1038,23 @@ _and_ exited. The order between these two events cannot be determined in advance. However, it is guaranteed that the removal from the `cluster.workers` list happens before last `'disconnect'` or `'exit'` event is emitted. 
-```js +```mjs +import cluster from 'cluster'; + +// Go through all workers +function eachWorker(callback) { + for (const id in cluster.workers) { + callback(cluster.workers[id]); + } +} +eachWorker((worker) => { + worker.send('big announcement to all workers'); +}); +``` + +```cjs +const cluster = require('cluster'); + // Go through all workers function eachWorker(callback) { for (const id in cluster.workers) { diff --git a/doc/api/crypto.md b/doc/api/crypto.md index 8618ea2476b4b3..2daca54e7b7c47 100644 --- a/doc/api/crypto.md +++ b/doc/api/crypto.md @@ -10,7 +10,7 @@ The `crypto` module provides cryptographic functionality that includes a set of wrappers for OpenSSL's hash, HMAC, cipher, decipher, sign, and verify functions. ```mjs -import { createHmac } from 'crypto'; +const { createHmac } = await import('crypto'); const secret = 'abcdefg'; const hash = createHmac('sha256', secret) @@ -162,7 +162,9 @@ changes: `false` otherwise. ```mjs +import { Buffer } from 'buffer'; const { Certificate } = await import('crypto'); + const spkac = getSpkacSomehow(); console.log(Certificate.verifySpkac(Buffer.from(spkac))); // Prints: true or false @@ -170,6 +172,8 @@ console.log(Certificate.verifySpkac(Buffer.from(spkac))); ```cjs const { Certificate } = require('crypto'); +const { Buffer } = require('buffer'); + const spkac = getSpkacSomehow(); console.log(Certificate.verifySpkac(Buffer.from(spkac))); // Prints: true or false @@ -268,7 +272,9 @@ added: v0.11.8 `false` otherwise. 
```mjs +import { Buffer } from 'buffer'; const { Certificate } = await import('crypto'); + const cert = Certificate(); const spkac = getSpkacSomehow(); console.log(cert.verifySpkac(Buffer.from(spkac))); @@ -277,6 +283,8 @@ console.log(cert.verifySpkac(Buffer.from(spkac))); ```cjs const { Certificate } = require('crypto'); +const { Buffer } = require('buffer'); + const cert = Certificate(); const spkac = getSpkacSomehow(); console.log(cert.verifySpkac(Buffer.from(spkac))); @@ -385,7 +393,7 @@ import { const { scrypt, randomFill, - createCipheriv, + createCipheriv } = await import('crypto'); const algorithm = 'aes-192-cbc'; @@ -456,7 +464,7 @@ Example: Using the [`cipher.update()`][] and [`cipher.final()`][] methods: const { scrypt, randomFill, - createCipheriv, + createCipheriv } = await import('crypto'); const algorithm = 'aes-192-cbc'; @@ -626,9 +634,10 @@ directly using the `new` keyword. Example: Using `Decipher` objects as streams: ```mjs +import { Buffer } from 'buffer'; const { scryptSync, - createDecipheriv, + createDecipheriv } = await import('crypto'); const algorithm = 'aes-192-cbc'; @@ -665,6 +674,7 @@ const { scryptSync, createDecipheriv, } = require('crypto'); +const { Buffer } = require('buffer'); const algorithm = 'aes-192-cbc'; const password = 'Password used to generate key'; @@ -702,10 +712,10 @@ import { createReadStream, createWriteStream, } from 'fs'; - +import { Buffer } from 'buffer'; const { scryptSync, - createDecipheriv, + createDecipheriv } = await import('crypto'); const algorithm = 'aes-192-cbc'; @@ -728,11 +738,11 @@ const { createReadStream, createWriteStream, } = require('fs'); - const { scryptSync, createDecipheriv, } = require('crypto'); +const { Buffer } = require('buffer'); const algorithm = 'aes-192-cbc'; const password = 'Password used to generate key'; @@ -752,9 +762,10 @@ input.pipe(decipher).pipe(output); Example: Using the [`decipher.update()`][] and [`decipher.final()`][] methods: ```mjs +import { Buffer } from 'buffer'; 
const { scryptSync, - createDecipheriv, + createDecipheriv } = await import('crypto'); const algorithm = 'aes-192-cbc'; @@ -780,6 +791,7 @@ const { scryptSync, createDecipheriv, } = require('crypto'); +const { Buffer } = require('buffer'); const algorithm = 'aes-192-cbc'; const password = 'Password used to generate key'; @@ -942,7 +954,7 @@ Instances of the `DiffieHellman` class can be created using the import assert from 'assert'; const { - createDiffieHellman, + createDiffieHellman } = await import('crypto'); // Generate Alice's keys... @@ -1159,7 +1171,7 @@ Instances of the `ECDH` class can be created using the import assert from 'assert'; const { - createECDH, + createECDH } = await import('crypto'); // Generate Alice's keys... @@ -1234,7 +1246,7 @@ Example (uncompressing a key): ```mjs const { createECDH, - ECDH, + ECDH } = await import('crypto'); const ecdh = createECDH('secp256k1'); @@ -1400,7 +1412,7 @@ Example (obtaining a shared secret): ```mjs const { createECDH, - createHash, + createHash } = await import('crypto'); const alice = createECDH('secp256k1'); @@ -1473,7 +1485,7 @@ Example: Using `Hash` objects as streams: ```mjs const { - createHash, + createHash } = await import('crypto'); const hash = createHash('sha256'); @@ -1519,36 +1531,31 @@ Example: Using `Hash` and piped streams: ```mjs import { createReadStream } from 'fs'; +import { stdout } from 'process'; +const { createHash } = await import('crypto'); -const { - createHash, -} = await import('crypto'); const hash = createHash('sha256'); const input = createReadStream('test.js'); -input.pipe(hash).setEncoding('hex').pipe(process.stdout); +input.pipe(hash).setEncoding('hex').pipe(stdout); ``` ```cjs -const { - createReadStream, -} = require('fs'); - -const { - createHash, -} = require('crypto'); +const { createReadStream } = require('fs'); +const { createHash } = require('crypto'); +const { stdout } = require('process'); const hash = createHash('sha256'); const input = 
createReadStream('test.js'); -input.pipe(hash).setEncoding('hex').pipe(process.stdout); +input.pipe(hash).setEncoding('hex').pipe(stdout); ``` Example: Using the [`hash.update()`][] and [`hash.digest()`][] methods: ```mjs const { - createHash, + createHash } = await import('crypto'); const hash = createHash('sha256'); @@ -1593,7 +1600,7 @@ its [`hash.digest()`][] method has been called. ```mjs // Calculate a rolling hash. const { - createHash, + createHash } = await import('crypto'); const hash = createHash('sha256'); @@ -1688,7 +1695,7 @@ Example: Using `Hmac` objects as streams: ```mjs const { - createHmac, + createHmac } = await import('crypto'); const hmac = createHmac('sha256', 'a secret'); @@ -1734,37 +1741,37 @@ Example: Using `Hmac` and piped streams: ```mjs import { createReadStream } from 'fs'; - +import { stdout } from 'process'; const { - createHmac, + createHmac } = await import('crypto'); const hmac = createHmac('sha256', 'a secret'); const input = createReadStream('test.js'); -input.pipe(hmac).pipe(process.stdout); +input.pipe(hmac).pipe(stdout); ``` ```cjs const { createReadStream, } = require('fs'); - const { createHmac, } = require('crypto'); +const { stdout } = require('process'); const hmac = createHmac('sha256', 'a secret'); const input = createReadStream('test.js'); -input.pipe(hmac).pipe(process.stdout); +input.pipe(hmac).pipe(stdout); ``` Example: Using the [`hmac.update()`][] and [`hmac.digest()`][] methods: ```mjs const { - createHmac, + createHmac } = await import('crypto'); const hmac = createHmac('sha256', 'a secret'); @@ -1863,12 +1870,8 @@ added: v15.0.0 Example: Converting a `CryptoKey` instance to a `KeyObject`: ```mjs -const { - webcrypto: { - subtle, - }, - KeyObject, -} = await import('crypto'); +const { webcrypto, KeyObject } = await import('crypto'); +const { subtle } = webcrypto; const key = await subtle.generateKey({ name: 'HMAC', @@ -2058,7 +2061,7 @@ Example: Using `Sign` and [`Verify`][] objects as streams: const { 
generateKeyPairSync, createSign, - createVerify, + createVerify } = await import('crypto'); const { privateKey, publicKey } = generateKeyPairSync('ec', { @@ -2106,7 +2109,7 @@ Example: Using the [`sign.update()`][] and [`verify.update()`][] methods: const { generateKeyPairSync, createSign, - createVerify, + createVerify } = await import('crypto'); const { privateKey, publicKey } = generateKeyPairSync('rsa', { @@ -3026,12 +3029,12 @@ Example: generating the sha256 sum of a file import { createReadStream } from 'fs'; - +import { argv } from 'process'; const { - createHash, + createHash } = await import('crypto'); -const filename = process.argv[2]; +const filename = argv[2]; const hash = createHash('sha256'); @@ -3052,12 +3055,12 @@ input.on('readable', () => { const { createReadStream, } = require('fs'); - const { createHash, } = require('crypto'); +const { argv } = require('process'); -const filename = process.argv[2]; +const filename = argv[2]; const hash = createHash('sha256'); @@ -3112,12 +3115,12 @@ Example: generating the sha256 HMAC of a file import { createReadStream } from 'fs'; - +import { argv } from 'process'; const { - createHmac, + createHmac } = await import('crypto'); -const filename = process.argv[2]; +const filename = argv[2]; const hmac = createHmac('sha256', 'a secret'); @@ -3138,12 +3141,12 @@ input.on('readable', () => { const { createReadStream, } = require('fs'); - const { createHmac, } = require('crypto'); +const { argv } = require('process'); -const filename = process.argv[2]; +const filename = argv[2]; const hmac = createHmac('sha256', 'a secret'); @@ -3336,7 +3339,7 @@ Asynchronously generates a new random secret key of the given `length`. 
The ```mjs const { - generateKey, + generateKey } = await import('crypto'); generateKey('hmac', { length: 64 }, (err, key) => { @@ -3408,7 +3411,7 @@ It is recommended to encode public keys as `'spki'` and private keys as ```mjs const { - generateKeyPair, + generateKeyPair } = await import('crypto'); generateKeyPair('rsa', { @@ -3505,7 +3508,7 @@ and to keep the passphrase confidential. ```mjs const { - generateKeyPairSync, + generateKeyPairSync } = await import('crypto'); const { @@ -3573,7 +3576,7 @@ Synchronously generates a new random secret key of the given `length`. The ```mjs const { - generateKeySync, + generateKeySync } = await import('crypto'); const key = generateKeySync('hmac', 64); @@ -3710,7 +3713,7 @@ added: v0.9.3 ```mjs const { - getCiphers, + getCiphers } = await import('crypto'); console.log(getCiphers()); // ['aes-128-cbc', 'aes-128-ccm', ...] @@ -3733,7 +3736,7 @@ added: v2.3.0 ```mjs const { - getCurves, + getCurves } = await import('crypto'); console.log(getCurves()); // ['Oakley-EC2N-3', 'Oakley-EC2N-4', ...] @@ -3770,7 +3773,7 @@ Example (obtaining a shared secret): ```mjs const { - getDiffieHellman, + getDiffieHellman } = await import('crypto'); const alice = getDiffieHellman('modp14'); const bob = getDiffieHellman('modp14'); @@ -3822,7 +3825,7 @@ added: v0.9.3 ```mjs const { - getHashes, + getHashes } = await import('crypto'); console.log(getHashes()); // ['DSA', 'DSA-SHA', 'DSA-SHA1', ...] @@ -3866,8 +3869,9 @@ be passed to the callback as an {ArrayBuffer}. An error will be thrown if any of the input arguments specify invalid values or types. 
```mjs +import { Buffer } from 'buffer'; const { - hkdf, + hkdf } = await import('crypto'); hkdf('sha512', 'key', 'salt', 'info', 64, (err, derivedKey) => { @@ -3880,6 +3884,7 @@ hkdf('sha512', 'key', 'salt', 'info', 64, (err, derivedKey) => { const { hkdf, } = require('crypto'); +const { Buffer } = require('buffer'); hkdf('sha512', 'key', 'salt', 'info', 64, (err, derivedKey) => { if (err) throw err; @@ -3915,8 +3920,9 @@ An error will be thrown if any of the input arguments specify invalid values or types, or if the derived key cannot be generated. ```mjs +import { Buffer } from 'buffer'; const { - hkdfSync, + hkdfSync } = await import('crypto'); const derivedKey = hkdfSync('sha512', 'key', 'salt', 'info', 64); @@ -3927,6 +3933,7 @@ console.log(Buffer.from(derivedKey).toString('hex')); // '24156e2...5391653' const { hkdfSync, } = require('crypto'); +const { Buffer } = require('buffer'); const derivedKey = hkdfSync('sha512', 'key', 'salt', 'info', 64); console.log(Buffer.from(derivedKey).toString('hex')); // '24156e2...5391653' @@ -3992,7 +3999,7 @@ When passing strings for `password` or `salt`, please consider ```mjs const { - pbkdf2, + pbkdf2 } = await import('crypto'); pbkdf2('secret', 'salt', 100000, 64, 'sha512', (err, derivedKey) => { @@ -4017,7 +4024,7 @@ The `crypto.DEFAULT_ENCODING` property can be used to change the way the deprecated and use should be avoided. ```mjs -const crypto = await import('crypto'); +import crypto from 'crypto'; crypto.DEFAULT_ENCODING = 'hex'; crypto.pbkdf2('secret', 'salt', 100000, 512, 'sha512', (err, derivedKey) => { if (err) throw err; @@ -4089,7 +4096,7 @@ When passing strings for `password` or `salt`, please consider ```mjs const { - pbkdf2Sync, + pbkdf2Sync } = await import('crypto'); const key = pbkdf2Sync('secret', 'salt', 100000, 64, 'sha512'); @@ -4110,7 +4117,7 @@ The `crypto.DEFAULT_ENCODING` property may be used to change the way the should be avoided. 
```mjs -const crypto = await import('crypto'); +import crypto from 'crypto'; crypto.DEFAULT_ENCODING = 'hex'; const key = crypto.pbkdf2Sync('secret', 'salt', 100000, 512, 'sha512'); console.log(key); // '3745e48...aa39b34' @@ -4327,7 +4334,7 @@ If an error occurs, `err` will be an `Error` object; otherwise it is `null`. The ```mjs // Asynchronous const { - randomBytes, + randomBytes } = await import('crypto'); randomBytes(256, (err, buf) => { @@ -4355,7 +4362,7 @@ there is a problem generating the bytes. ```mjs // Synchronous const { - randomBytes, + randomBytes } = await import('crypto'); const buf = randomBytes(256); @@ -4411,9 +4418,8 @@ changes: Synchronous version of [`crypto.randomFill()`][]. ```mjs -const { - randomFillSync, -} = await import('crypto'); +import { Buffer } from 'buffer'; +const { randomFillSync } = await import('crypto'); const buf = Buffer.alloc(10); console.log(randomFillSync(buf).toString('hex')); @@ -4427,9 +4433,8 @@ console.log(buf.toString('hex')); ``` ```cjs -const { - randomFillSync, -} = require('crypto'); +const { randomFillSync } = require('crypto'); +const { Buffer } = require('buffer'); const buf = Buffer.alloc(10); console.log(randomFillSync(buf).toString('hex')); @@ -4446,9 +4451,8 @@ Any `ArrayBuffer`, `TypedArray` or `DataView` instance may be passed as `buffer`. ```mjs -const { - randomFillSync, -} = await import('crypto'); +import { Buffer } from 'buffer'; +const { randomFillSync } = await import('crypto'); const a = new Uint32Array(10); console.log(Buffer.from(randomFillSync(a).buffer, @@ -4463,9 +4467,8 @@ console.log(Buffer.from(randomFillSync(c)).toString('hex')); ``` ```cjs -const { - randomFillSync, -} = require('crypto'); +const { randomFillSync } = require('crypto'); +const { Buffer } = require('buffer'); const a = new Uint32Array(10); console.log(Buffer.from(randomFillSync(a).buffer, @@ -4504,9 +4507,8 @@ requires that a callback is passed in. If the `callback` function is not provided, an error will be thrown. 
```mjs -const { - randomFill, -} = await import('crypto'); +import { Buffer } from 'buffer'; +const { randomFill } = await import('crypto'); const buf = Buffer.alloc(10); randomFill(buf, (err, buf) => { @@ -4527,9 +4529,8 @@ randomFill(buf, 5, 5, (err, buf) => { ``` ```cjs -const { - randomFill, -} = require('crypto'); +const { randomFill } = require('crypto'); +const { Buffer } = require('buffer'); const buf = Buffer.alloc(10); randomFill(buf, (err, buf) => { @@ -4559,9 +4560,8 @@ contains finite numbers only, they are not drawn from a uniform random distribution and have no meaningful lower or upper bounds. ```mjs -const { - randomFill, -} = await import('crypto'); +import { Buffer } from 'buffer'; +const { randomFill } = await import('crypto'); const a = new Uint32Array(10); randomFill(a, (err, buf) => { @@ -4585,9 +4585,8 @@ randomFill(c, (err, buf) => { ``` ```cjs -const { - randomFill, -} = require('crypto'); +const { randomFill } = require('crypto'); +const { Buffer } = require('buffer'); const a = new Uint32Array(10); randomFill(a, (err, buf) => { @@ -4642,7 +4641,7 @@ generated synchronously. ```mjs // Asynchronous const { - randomInt, + randomInt } = await import('crypto'); randomInt(3, (err, n) => { @@ -4666,7 +4665,7 @@ randomInt(3, (err, n) => { ```mjs // Synchronous const { - randomInt, + randomInt } = await import('crypto'); const n = randomInt(3); @@ -4686,7 +4685,7 @@ console.log(`Random number chosen from (0, 1, 2): ${n}`); ```mjs // With `min` argument const { - randomInt, + randomInt } = await import('crypto'); const n = randomInt(1, 7); @@ -4774,7 +4773,7 @@ or types. ```mjs const { - scrypt, + scrypt } = await import('crypto'); // Using the factory defaults. @@ -4854,7 +4853,7 @@ or types. ```mjs const { - scryptSync, + scryptSync } = await import('crypto'); // Using the factory defaults. @@ -5210,10 +5209,11 @@ mode must adhere to certain restrictions when using the cipher API: authentication tag. 
```mjs +import { Buffer } from 'buffer'; const { createCipheriv, createDecipheriv, - randomBytes, + randomBytes } = await import('crypto'); const key = 'keykeykeykeykeykeykeykey'; @@ -5259,6 +5259,7 @@ const { createDecipheriv, randomBytes, } = require('crypto'); +const { Buffer } = require('buffer'); const key = 'keykeykeykeykeykeykeykey'; const nonce = randomBytes(12); diff --git a/doc/api/dgram.md b/doc/api/dgram.md index 8813f9bffaf384..365c30c9ae64a7 100644 --- a/doc/api/dgram.md +++ b/doc/api/dgram.md @@ -10,7 +10,30 @@ The `dgram` module provides an implementation of UDP datagram sockets. -```js +```mjs +import dgram from 'dgram'; + +const server = dgram.createSocket('udp4'); + +server.on('error', (err) => { + console.log(`server error:\n${err.stack}`); + server.close(); +}); + +server.on('message', (msg, rinfo) => { + console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); +}); + +server.on('listening', () => { + const address = server.address(); + console.log(`server listening ${address.address}:${address.port}`); +}); + +server.bind(41234); +// Prints: server listening 0.0.0.0:41234 +``` + +```cjs const dgram = require('dgram'); const server = dgram.createSocket('udp4'); @@ -123,9 +146,25 @@ When sharing a UDP socket across multiple `cluster` workers, the `socket.addMembership()` function must be called only once or an `EADDRINUSE` error will occur: -```js +```mjs +import cluster from 'cluster'; +import dgram from 'dgram'; + +if (cluster.isPrimary) { + cluster.fork(); // Works ok. + cluster.fork(); // Fails with EADDRINUSE. +} else { + const s = dgram.createSocket('udp4'); + s.bind(1234, () => { + s.addMembership('224.0.0.114'); + }); +} +``` + +```cjs const cluster = require('cluster'); const dgram = require('dgram'); + if (cluster.isPrimary) { cluster.fork(); // Works ok. cluster.fork(); // Fails with EADDRINUSE. @@ -205,7 +244,30 @@ attempting to bind with a closed socket), an [`Error`][] may be thrown. 
Example of a UDP server listening on port 41234: -```js +```mjs +import dgram from 'dgram'; + +const server = dgram.createSocket('udp4'); + +server.on('error', (err) => { + console.log(`server error:\n${err.stack}`); + server.close(); +}); + +server.on('message', (msg, rinfo) => { + console.log(`server got: ${msg} from ${rinfo.address}:${rinfo.port}`); +}); + +server.on('listening', () => { + const address = server.address(); + console.log(`server listening ${address.address}:${address.port}`); +}); + +server.bind(41234); +// Prints: server listening 0.0.0.0:41234 +``` + +```cjs const dgram = require('dgram'); const server = dgram.createSocket('udp4'); @@ -480,8 +542,21 @@ This method throws [`ERR_SOCKET_BAD_PORT`][] if called on an unbound socket. Example of sending a UDP packet to a port on `localhost`; -```js +```mjs +import dgram from 'dgram'; +import { Buffer } from 'buffer'; + +const message = Buffer.from('Some bytes'); +const client = dgram.createSocket('udp4'); +client.send(message, 41234, 'localhost', (err) => { + client.close(); +}); +``` + +```cjs const dgram = require('dgram'); +const { Buffer } = require('buffer'); + const message = Buffer.from('Some bytes'); const client = dgram.createSocket('udp4'); client.send(message, 41234, 'localhost', (err) => { @@ -492,8 +567,22 @@ client.send(message, 41234, 'localhost', (err) => { Example of sending a UDP packet composed of multiple buffers to a port on `127.0.0.1`; -```js +```mjs +import dgram from 'dgram'; +import { Buffer } from 'buffer'; + +const buf1 = Buffer.from('Some '); +const buf2 = Buffer.from('bytes'); +const client = dgram.createSocket('udp4'); +client.send([buf1, buf2], 41234, (err) => { + client.close(); +}); +``` + +```cjs const dgram = require('dgram'); +const { Buffer } = require('buffer'); + const buf1 = Buffer.from('Some '); const buf2 = Buffer.from('bytes'); const client = dgram.createSocket('udp4'); @@ -510,8 +599,23 @@ however, sending multiple buffers is faster. 
Example of sending a UDP packet using a socket connected to a port on `localhost`: -```js +```mjs +import dgram from 'dgram'; +import { Buffer } from 'buffer'; + +const message = Buffer.from('Some bytes'); +const client = dgram.createSocket('udp4'); +client.connect(41234, 'localhost', (err) => { + client.send(message, (err) => { + client.close(); + }); +}); +``` + +```cjs const dgram = require('dgram'); +const { Buffer } = require('buffer'); + const message = Buffer.from('Some bytes'); const client = dgram.createSocket('udp4'); client.connect(41234, 'localhost', (err) => { diff --git a/doc/api/diagnostics_channel.md b/doc/api/diagnostics_channel.md index 7a22b2f56eec1d..ba02a5092c3731 100644 --- a/doc/api/diagnostics_channel.md +++ b/doc/api/diagnostics_channel.md @@ -11,7 +11,11 @@ to report arbitrary message data for diagnostics purposes. It can be accessed using: -```js +```mjs +import diagnostics_channel from 'diagnostics_channel'; +``` + +```cjs const diagnostics_channel = require('diagnostics_channel'); ``` @@ -33,7 +37,27 @@ other modules. Following is a simple overview of the public API. -```js +```mjs +import diagnostics_channel from 'diagnostics_channel'; + +// Get a reusable channel object +const channel = diagnostics_channel.channel('my-channel'); + +// Subscribe to the channel +channel.subscribe((message, name) => { + // Received data +}); + +// Check if the channel has an active subscriber +if (channel.hasSubscribers) { + // Publish data to the channel + channel.publish({ + some: 'data' + }); +} +``` + +```cjs const diagnostics_channel = require('diagnostics_channel'); // Get a reusable channel object @@ -64,7 +88,15 @@ the message you want to send might be expensive to prepare. This API is optional but helpful when trying to publish messages from very performance-sensitive code. 
-```js +```mjs +import diagnostics_channel from 'diagnostics_channel'; + +if (diagnostics_channel.hasSubscribers('my-channel')) { + // There are subscribers, prepare and publish message +} +``` + +```cjs const diagnostics_channel = require('diagnostics_channel'); if (diagnostics_channel.hasSubscribers('my-channel')) { @@ -81,7 +113,13 @@ This is the primary entry-point for anyone wanting to interact with a named channel. It produces a channel object which is optimized to reduce overhead at publish time as much as possible. -```js +```mjs +import diagnostics_channel from 'diagnostics_channel'; + +const channel = diagnostics_channel.channel('my-channel'); +``` + +```cjs const diagnostics_channel = require('diagnostics_channel'); const channel = diagnostics_channel.channel('my-channel'); @@ -107,7 +145,17 @@ the message you want to send might be expensive to prepare. This API is optional but helpful when trying to publish messages from very performance-sensitive code. -```js +```mjs +import diagnostics_channel from 'diagnostics_channel'; + +const channel = diagnostics_channel.channel('my-channel'); + +if (channel.hasSubscribers) { + // There are subscribers, prepare and publish message +} +``` + +```cjs const diagnostics_channel = require('diagnostics_channel'); const channel = diagnostics_channel.channel('my-channel'); @@ -124,7 +172,17 @@ if (channel.hasSubscribers) { Publish a message to any subscribers to the channel. This will trigger message handlers synchronously so they will execute within the same context. -```js +```mjs +import diagnostics_channel from 'diagnostics_channel'; + +const channel = diagnostics_channel.channel('my-channel'); + +channel.publish({ + some: 'message' +}); +``` + +```cjs const diagnostics_channel = require('diagnostics_channel'); const channel = diagnostics_channel.channel('my-channel'); @@ -144,7 +202,17 @@ Register a message handler to subscribe to this channel. 
This message handler will be run synchronously whenever a message is published to the channel. Any errors thrown in the message handler will trigger an [`'uncaughtException'`][]. -```js +```mjs +import diagnostics_channel from 'diagnostics_channel'; + +const channel = diagnostics_channel.channel('my-channel'); + +channel.subscribe((message, name) => { + // Received data +}); +``` + +```cjs const diagnostics_channel = require('diagnostics_channel'); const channel = diagnostics_channel.channel('my-channel'); @@ -161,7 +229,21 @@ channel.subscribe((message, name) => { Remove a message handler previously registered to this channel with [`channel.subscribe(onMessage)`][]. -```js +```mjs +import diagnostics_channel from 'diagnostics_channel'; + +const channel = diagnostics_channel.channel('my-channel'); + +function onMessage(message, name) { + // Received data +} + +channel.subscribe(onMessage); + +channel.unsubscribe(onMessage); +``` + +```cjs const diagnostics_channel = require('diagnostics_channel'); const channel = diagnostics_channel.channel('my-channel'); diff --git a/doc/api/esm.md b/doc/api/esm.md index 691c5d47d5a9bb..72bbf1c51cebe9 100644 --- a/doc/api/esm.md +++ b/doc/api/esm.md @@ -246,6 +246,7 @@ readFile('./foo.txt', (err, source) => { ```js import fs, { readFileSync } from 'fs'; import { syncBuiltinESMExports } from 'module'; +import { Buffer } from 'buffer'; fs.readFileSync = () => Buffer.from('Hello, ESM'); syncBuiltinESMExports(); @@ -818,8 +819,9 @@ globalThis.someInjectedProperty = 42; console.log('I just set some globals!'); const { createRequire } = getBuiltin('module'); +const { cwd } = getBuiltin('process'); -const require = createRequire(process.cwd() + '/'); +const require = createRequire(cwd() + '/'); // [...] `; } @@ -920,8 +922,9 @@ purposes. 
// coffeescript-loader.mjs import { URL, pathToFileURL } from 'url'; import CoffeeScript from 'coffeescript'; +import { cwd } from 'process'; -const baseURL = pathToFileURL(`${process.cwd()}/`).href; +const baseURL = pathToFileURL(`${cwd()}/`).href; // CoffeeScript files end in .coffee, .litcoffee or .coffee.md. const extensionsRegex = /\.coffee$|\.litcoffee$|\.coffee\.md$/; diff --git a/doc/api/fs.md b/doc/api/fs.md index 0a032426d5b4f0..5f1ea82b3fc8a6 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -1313,6 +1313,7 @@ to be written. ```mjs import { writeFile } from 'fs/promises'; +import { Buffer } from 'buffer'; try { const controller = new AbortController(); @@ -3943,6 +3944,7 @@ If `data` is a normal object, it must have an own `toString` function property. ```mjs import { writeFile } from 'fs'; +import { Buffer } from 'buffer'; const data = new Uint8Array(Buffer.from('Hello Node.js')); writeFile('message.txt', data, (err) => { @@ -3973,6 +3975,7 @@ to be written. ```mjs import { writeFile } from 'fs'; +import { Buffer } from 'buffer'; const controller = new AbortController(); const { signal } = controller; @@ -3994,6 +3997,7 @@ calling `fs.write()` like: ```mjs import { write } from 'fs'; +import { Buffer } from 'buffer'; write(fd, Buffer.from(data, options.encoding), callback); ``` @@ -6499,6 +6503,7 @@ Example using an absolute path on POSIX: ```mjs import { open } from 'fs/promises'; +import { Buffer } from 'buffer'; let fd; try { diff --git a/doc/api/process.md b/doc/api/process.md index 07c212d273054c..9dd5d50808b6ae 100644 --- a/doc/api/process.md +++ b/doc/api/process.md @@ -5,12 +5,15 @@ -The `process` object is a `global` that provides information about, and control -over, the current Node.js process. As a global, it is always available to -Node.js applications without using `require()`. It can also be explicitly -accessed using `require()`: +The `process` object provides information about, and control over, the current +Node.js process. 
While it is available as a global, it is recommended to +explicitly access it via require or import: -```js +```mjs +import process from 'process'; +``` + +```cjs const process = require('process'); ``` @@ -38,7 +41,28 @@ termination, such as calling [`process.exit()`][] or uncaught exceptions. The `'beforeExit'` should *not* be used as an alternative to the `'exit'` event unless the intention is to schedule additional work. -```js +```mjs +import process from 'process'; + +process.on('beforeExit', (code) => { + console.log('Process beforeExit event with code: ', code); +}); + +process.on('exit', (code) => { + console.log('Process exit event with code: ', code); +}); + +console.log('This message is displayed first.'); + +// Prints: +// This message is displayed first. +// Process beforeExit event with code: 0 +// Process exit event with code: 0 +``` + +```cjs +const process = require('process'); + process.on('beforeExit', (code) => { console.log('Process beforeExit event with code: ', code); }); @@ -84,7 +108,17 @@ The listener callback function is invoked with the exit code specified either by the [`process.exitCode`][] property, or the `exitCode` argument passed to the [`process.exit()`][] method. -```js +```mjs +import process from 'process'; + +process.on('exit', (code) => { + console.log(`About to exit with code: ${code}`); +}); +``` + +```cjs +const process = require('process'); + process.on('exit', (code) => { console.log(`About to exit with code: ${code}`); }); @@ -95,7 +129,19 @@ process will exit immediately after calling the `'exit'` event listeners causing any additional work still queued in the event loop to be abandoned. 
In the following example, for instance, the timeout will never occur: -```js +```mjs +import process from 'process'; + +process.on('exit', (code) => { + setTimeout(() => { + console.log('This will not run'); + }, 0); +}); +``` + +```cjs +const process = require('process'); + process.on('exit', (code) => { setTimeout(() => { console.log('This will not run'); @@ -148,7 +194,38 @@ This is useful for tracking potential errors in an application while using the the occurrence of this event does not necessarily indicate an error. For example, [`Promise.race()`][] can trigger a `'multipleResolves'` event. -```js +```mjs +import process from 'process'; + +process.on('multipleResolves', (type, promise, reason) => { + console.error(type, promise, reason); + setImmediate(() => process.exit(1)); +}); + +async function main() { + try { + return await new Promise((resolve, reject) => { + resolve('First call'); + resolve('Swallowed resolve'); + reject(new Error('Swallowed reject')); + }); + } catch { + throw new Error('Failed'); + } +} + +main().then(console.log); +// resolve: Promise { 'First call' } 'Swallowed resolve' +// reject: Promise { 'First call' } Error: Swallowed reject +// at Promise (*) +// at new Promise () +// at main (*) +// First call +``` + +```cjs +const process = require('process'); + process.on('multipleResolves', (type, promise, reason) => { console.error(type, promise, reason); setImmediate(() => process.exit(1)); @@ -206,7 +283,21 @@ In asynchronous code, the `'unhandledRejection'` event is emitted when the list of unhandled rejections grows, and the `'rejectionHandled'` event is emitted when the list of unhandled rejections shrinks. 
-```js +```mjs +import process from 'process'; + +const unhandledRejections = new Map(); +process.on('unhandledRejection', (reason, promise) => { + unhandledRejections.set(promise, reason); +}); +process.on('rejectionHandled', (promise) => { + unhandledRejections.delete(promise); +}); +``` + +```cjs +const process = require('process'); + const unhandledRejections = new Map(); process.on('unhandledRejection', (reason, promise) => { unhandledRejections.set(promise, reason); @@ -250,7 +341,29 @@ behavior. Alternatively, change the [`process.exitCode`][] in the provided exit code. Otherwise, in the presence of such handler the process will exit with 0. -```js +```mjs +import process from 'process'; + +process.on('uncaughtException', (err, origin) => { + fs.writeSync( + process.stderr.fd, + `Caught exception: ${err}\n` + + `Exception origin: ${origin}` + ); +}); + +setTimeout(() => { + console.log('This will still run.'); +}, 500); + +// Intentionally cause an exception, but don't catch it. +nonexistentFunc(); +console.log('This will not run.'); +``` + +```cjs +const process = require('process'); + process.on('uncaughtException', (err, origin) => { fs.writeSync( process.stderr.fd, @@ -321,7 +434,21 @@ Installing an `'uncaughtExceptionMonitor'` listener does not change the behavior once an `'uncaughtException'` event is emitted. The process will still crash if no `'uncaughtException'` listener is installed. -```js +```mjs +import process from 'process'; + +process.on('uncaughtExceptionMonitor', (err, origin) => { + MyMonitoringTool.logSync(err, origin); +}); + +// Intentionally cause an exception, but don't catch it. +nonexistentFunc(); +// Still crashes Node.js +``` + +```cjs +const process = require('process'); + process.on('uncaughtExceptionMonitor', (err, origin) => { MyMonitoringTool.logSync(err, origin); }); @@ -356,7 +483,22 @@ are propagated through a `Promise` chain. 
The `'unhandledRejection'` event is useful for detecting and keeping track of promises that were rejected whose rejections have not yet been handled. -```js +```mjs +import process from 'process'; + +process.on('unhandledRejection', (reason, promise) => { + console.log('Unhandled Rejection at:', promise, 'reason:', reason); + // Application specific logging, throwing an error, or other logic here +}); + +somePromise.then((res) => { + return reportToUser(JSON.pasre(res)); // Note the typo (`pasre`) +}); // No `.catch()` or `.then()` +``` + +```cjs +const process = require('process'); + process.on('unhandledRejection', (reason, promise) => { console.log('Unhandled Rejection at:', promise, 'reason:', reason); // Application specific logging, throwing an error, or other logic here @@ -370,7 +512,21 @@ somePromise.then((res) => { The following will also trigger the `'unhandledRejection'` event to be emitted: -```js +```mjs +import process from 'process'; + +function SomeResource() { + // Initially set the loaded status to a rejected promise + this.loaded = Promise.reject(new Error('Resource not yet loaded!')); +} + +const resource = new SomeResource(); +// no .catch or .then on resource.loaded for at least a turn +``` + +```cjs +const process = require('process'); + function SomeResource() { // Initially set the loaded status to a rejected promise this.loaded = Promise.reject(new Error('Resource not yet loaded!')); @@ -406,7 +562,19 @@ are not part of the normal Node.js and JavaScript error handling flow. Node.js can emit warnings whenever it detects bad coding practices that could lead to sub-optimal application performance, bugs, or security vulnerabilities. 
-```js +```mjs +import process from 'process'; + +process.on('warning', (warning) => { + console.warn(warning.name); // Print the warning name + console.warn(warning.message); // Print the warning message + console.warn(warning.stack); // Print the stack trace +}); +``` + +```cjs +const process = require('process'); + process.on('warning', (warning) => { console.warn(warning.name); // Print the warning name console.warn(warning.message); // Print the warning message @@ -511,7 +679,28 @@ The signal handler will receive the signal's name (`'SIGINT'`, The name of each event will be the uppercase common name for the signal (e.g. `'SIGINT'` for `SIGINT` signals). -```js +```mjs +import process from 'process'; + +// Begin reading from stdin so the process does not exit. +process.stdin.resume(); + +process.on('SIGINT', () => { + console.log('Received SIGINT. Press Control-D to exit.'); +}); + +// Using a single function to handle multiple signals +function handle(signal) { + console.log(`Received ${signal}`); +} + +process.on('SIGINT', handle); +process.on('SIGTERM', handle); +``` + +```cjs +const process = require('process'); + // Begin reading from stdin so the process does not exit. process.stdin.resume(); @@ -613,8 +802,21 @@ appear only *once*; each will begin with one or more dashes. Flags passed through to V8 will contain underscores instead of non-leading dashes: -```js -process.allowedNodeEnvironmentFlags.forEach((flag) => { +```mjs +import { allowedNodeEnvironmentFlags } from 'process'; + +allowedNodeEnvironmentFlags.forEach((flag) => { + // -r + // --inspect-brk + // --abort_on_uncaught_exception + // ... +}); +``` + +```cjs +const { allowedNodeEnvironmentFlags } = require('process'); + +allowedNodeEnvironmentFlags.forEach((flag) => { // -r // --inspect-brk // --abort_on_uncaught_exception @@ -641,7 +843,15 @@ The operating system CPU architecture for which the Node.js binary was compiled. 
Possible values are: `'arm'`, `'arm64'`, `'ia32'`, `'mips'`,`'mipsel'`, `'ppc'`, `'ppc64'`, `'s390'`, `'s390x'`, `'x32'`, and `'x64'`. -```js +```mjs +import { arch } from 'process'; + +console.log(`This processor architecture is ${arch}`); +``` + +```cjs +const { arch } = require('process'); + console.log(`This processor architecture is ${process.arch}`); ``` @@ -661,9 +871,20 @@ arguments. For example, assuming the following script for `process-args.js`: -```js +```mjs +import { argv } from 'process'; + +// print process.argv +argv.forEach((val, index) => { + console.log(`${index}: ${val}`); +}); +``` + +```cjs +const { argv } = require('process'); + // print process.argv -process.argv.forEach((val, index) => { +argv.forEach((val, index) => { console.log(`${index}: ${val}`); }); ``` @@ -753,11 +974,25 @@ The `process.chdir()` method changes the current working directory of the Node.js process or throws an exception if doing so fails (for instance, if the specified `directory` does not exist). -```js -console.log(`Starting directory: ${process.cwd()}`); +```mjs +import { chdir, cwd } from 'process'; + +console.log(`Starting directory: ${cwd()}`); try { - process.chdir('/tmp'); - console.log(`New directory: ${process.cwd()}`); + chdir('/tmp'); + console.log(`New directory: ${cwd()}`); +} catch (err) { + console.error(`chdir: ${err}`); +} +``` + +```cjs +const { chdir, cwd } = require('process'); + +console.log(`Starting directory: ${cwd()}`); +try { + chdir('/tmp'); + console.log(`New directory: ${cwd()}`); } catch (err) { console.error(`chdir: ${err}`); } @@ -855,15 +1090,31 @@ actual elapsed time if multiple CPU cores are performing work for this process. The result of a previous call to `process.cpuUsage()` can be passed as the argument to the function, to get a diff reading. 
-```js -const startUsage = process.cpuUsage(); +```mjs +import { cpuUsage } from 'process'; + +const startUsage = cpuUsage(); // { user: 38579, system: 6986 } // spin the CPU for 500 milliseconds const now = Date.now(); while (Date.now() - now < 500); -console.log(process.cpuUsage(startUsage)); +console.log(cpuUsage(startUsage)); +// { user: 514883, system: 11226 } +``` + +```cjs +const { cpuUsage } = require('process'); + +const startUsage = cpuUsage(); +// { user: 38579, system: 6986 } + +// spin the CPU for 500 milliseconds +const now = Date.now(); +while (Date.now() - now < 500); + +console.log(cpuUsage(startUsage)); // { user: 514883, system: 11226 } ``` @@ -877,8 +1128,16 @@ added: v0.1.8 The `process.cwd()` method returns the current working directory of the Node.js process. -```js -console.log(`Current directory: ${process.cwd()}`); +```mjs +import { cwd } from 'process'; + +console.log(`Current directory: ${cwd()}`); +``` + +```cjs +const { cwd } = require('process'); + +console.log(`Current directory: ${cwd()}`); ``` ## `process.debugPort` @@ -890,7 +1149,15 @@ added: v0.7.2 The port used by the Node.js debugger when enabled. -```js +```mjs +import process from 'process'; + +process.debugPort = 5858; +``` + +```cjs +const process = require('process'); + process.debugPort = 5858; ``` @@ -941,12 +1208,24 @@ that exports a `foo` function. All the symbols are loaded before the call returns, by passing the `RTLD_NOW` constant. In this example the constant is assumed to be available. 
-```js -const os = require('os'); -const path = require('path'); +```mjs +import { dlopen } from 'process'; +import { constants } from 'os'; +import { fileURLToPath } from 'url'; + +const module = { exports: {} }; +dlopen(module, fileURLToPath(new URL('local.node', import.meta.url)), + constants.dlopen.RTLD_NOW); +module.exports.foo(); +``` + +```cjs +const { dlopen } = require('process'); +const { constants } = require('os'); +const { join } = require('path'); + const module = { exports: {} }; -process.dlopen(module, path.join(__dirname, 'local.node'), - os.constants.dlopen.RTLD_NOW); +dlopen(module, join(__dirname, 'local.node'), constants.dlopen.RTLD_NOW); module.exports.foo(); ``` @@ -969,9 +1248,24 @@ The `process.emitWarning()` method can be used to emit custom or application specific process warnings. These can be listened for by adding a handler to the [`'warning'`][process_warning] event. -```js +```mjs +import { emitWarning } from 'process'; + +// Emit a warning with a code and additional detail. +emitWarning('Something happened!', { + code: 'MY_WARNING', + detail: 'This is some additional information' +}); +// Emits: +// (node:56338) [MY_WARNING] Warning: Something happened! +// This is some additional information +``` + +```cjs +const { emitWarning } = require('process'); + // Emit a warning with a code and additional detail. -process.emitWarning('Something happened!', { +emitWarning('Something happened!', { code: 'MY_WARNING', detail: 'This is some additional information' }); @@ -984,7 +1278,21 @@ In this example, an `Error` object is generated internally by `process.emitWarning()` and passed through to the [`'warning'`][process_warning] handler. -```js +```mjs +import process from 'process'; + +process.on('warning', (warning) => { + console.warn(warning.name); // 'Warning' + console.warn(warning.message); // 'Something happened!' 
+ console.warn(warning.code); // 'MY_WARNING' + console.warn(warning.stack); // Stack trace + console.warn(warning.detail); // 'This is some additional information' +}); +``` + +```cjs +const process = require('process'); + process.on('warning', (warning) => { console.warn(warning.name); // 'Warning' console.warn(warning.message); // 'Something happened!' @@ -1013,19 +1321,48 @@ The `process.emitWarning()` method can be used to emit custom or application specific process warnings. These can be listened for by adding a handler to the [`'warning'`][process_warning] event. -```js +```mjs +import { emitWarning } from 'process'; + // Emit a warning using a string. -process.emitWarning('Something happened!'); +emitWarning('Something happened!'); // Emits: (node: 56338) Warning: Something happened! ``` -```js +```cjs +const { emitWarning } = require('process'); + +// Emit a warning using a string. +emitWarning('Something happened!'); +// Emits: (node: 56338) Warning: Something happened! +``` + +```mjs +import { emitWarning } from 'process'; + // Emit a warning using a string and a type. -process.emitWarning('Something Happened!', 'CustomWarning'); +emitWarning('Something Happened!', 'CustomWarning'); // Emits: (node:56338) CustomWarning: Something Happened! ``` -```js +```cjs +const { emitWarning } = require('process'); + +// Emit a warning using a string and a type. +emitWarning('Something Happened!', 'CustomWarning'); +// Emits: (node:56338) CustomWarning: Something Happened! +``` + +```mjs +import { emitWarning } from 'process'; + +emitWarning('Something happened!', 'CustomWarning', 'WARN001'); +// Emits: (node:56338) [WARN001] CustomWarning: Something happened! +``` + +```cjs +const { emitWarning } = require('process'); + process.emitWarning('Something happened!', 'CustomWarning', 'WARN001'); // Emits: (node:56338) [WARN001] CustomWarning: Something happened! 
``` @@ -1034,7 +1371,20 @@ In each of the previous examples, an `Error` object is generated internally by `process.emitWarning()` and passed through to the [`'warning'`][process_warning] handler. -```js +```mjs +import process from 'process'; + +process.on('warning', (warning) => { + console.warn(warning.name); + console.warn(warning.message); + console.warn(warning.code); + console.warn(warning.stack); +}); +``` + +```cjs +const process = require('process'); + process.on('warning', (warning) => { console.warn(warning.name); console.warn(warning.message); @@ -1047,14 +1397,29 @@ If `warning` is passed as an `Error` object, it will be passed through to the `'warning'` event handler unmodified (and the optional `type`, `code` and `ctor` arguments will be ignored): -```js +```mjs +import { emitWarning } from 'process'; + // Emit a warning using an Error object. const myWarning = new Error('Something happened!'); // Use the Error name property to specify the type name myWarning.name = 'CustomWarning'; myWarning.code = 'WARN001'; -process.emitWarning(myWarning); +emitWarning(myWarning); +// Emits: (node:56338) [WARN001] CustomWarning: Something happened! +``` + +```cjs +const { emitWarning } = require('process'); + +// Emit a warning using an Error object. +const myWarning = new Error('Something happened!'); +// Use the Error name property to specify the type name +myWarning.name = 'CustomWarning'; +myWarning.code = 'WARN001'; + +emitWarning(myWarning); // Emits: (node:56338) [WARN001] CustomWarning: Something happened! ``` @@ -1080,11 +1445,28 @@ As a best practice, warnings should be emitted only once per process. 
To do so, it is recommended to place the `emitWarning()` behind a simple boolean flag as illustrated in the example below: -```js +```mjs +import { emitWarning } from 'process'; + function emitMyWarning() { if (!emitMyWarning.warned) { emitMyWarning.warned = true; - process.emitWarning('Only warn once!'); + emitWarning('Only warn once!'); + } +} +emitMyWarning(); +// Emits: (node: 56339) Warning: Only warn once! +emitMyWarning(); +// Emits nothing +``` + +```cjs +const { emitWarning } = require('process'); + +function emitMyWarning() { + if (!emitMyWarning.warned) { + emitMyWarning.warned = true; + emitWarning('Only warn once!'); } } emitMyWarning(); @@ -1141,38 +1523,81 @@ $ node -e 'process.env.foo = "bar"' && echo $foo While the following will: -```js -process.env.foo = 'bar'; -console.log(process.env.foo); +```mjs +import { env } from 'process'; + +env.foo = 'bar'; +console.log(env.foo); +``` + +```cjs +const { env } = require('process'); + +env.foo = 'bar'; +console.log(env.foo); ``` Assigning a property on `process.env` will implicitly convert the value to a string. **This behavior is deprecated.** Future versions of Node.js may throw an error when the value is not a string, number, or boolean. -```js -process.env.test = null; -console.log(process.env.test); +```mjs +import { env } from 'process'; + +env.test = null; +console.log(env.test); // => 'null' -process.env.test = undefined; -console.log(process.env.test); +env.test = undefined; +console.log(env.test); +// => 'undefined' +``` + +```cjs +const { env } = require('process'); + +env.test = null; +console.log(env.test); +// => 'null' +env.test = undefined; +console.log(env.test); // => 'undefined' ``` Use `delete` to delete a property from `process.env`. 
-```js -process.env.TEST = 1; -delete process.env.TEST; -console.log(process.env.TEST); +```mjs +import { env } from 'process'; + +env.TEST = 1; +delete env.TEST; +console.log(env.TEST); +// => undefined +``` + +```cjs +const { env } = require('process'); + +env.TEST = 1; +delete env.TEST; +console.log(env.TEST); // => undefined ``` On Windows operating systems, environment variables are case-insensitive. -```js -process.env.TEST = 1; -console.log(process.env.test); +```mjs +import { env } from 'process'; + +env.TEST = 1; +console.log(env.test); +// => 1 +``` + +```cjs +const { env } = require('process'); + +env.TEST = 1; +console.log(env.test); // => 1 ``` @@ -1248,8 +1673,16 @@ called. To exit with a 'failure' code: -```js -process.exit(1); +```mjs +import { exit } from 'process'; + +exit(1); +``` + +```cjs +const { exit } = require('process'); + +exit(1); ``` The shell that executed Node.js should see the exit code as `1`. @@ -1268,11 +1701,23 @@ For instance, the following example illustrates a *misuse* of the `process.exit()` method that could lead to data printed to stdout being truncated and lost: -```js +```mjs +import { exit } from 'process'; + // This is an example of what *not* to do: if (someConditionNotMet()) { printUsageToStdout(); - process.exit(1); + exit(1); +} +``` + +```cjs +const { exit } = require('process'); + +// This is an example of what *not* to do: +if (someConditionNotMet()) { + printUsageToStdout(); + exit(1); } ``` @@ -1285,7 +1730,20 @@ Rather than calling `process.exit()` directly, the code *should* set the `process.exitCode` and allow the process to exit naturally by avoiding scheduling any additional work for the event loop: -```js +```mjs +import process from 'process'; + +// How to properly set the exit code while letting +// the process exit gracefully. 
+if (someConditionNotMet()) { + printUsageToStdout(); + process.exitCode = 1; +} +``` + +```cjs +const process = require('process'); + // How to properly set the exit code while letting // the process exit gracefully. if (someConditionNotMet()) { @@ -1323,7 +1781,17 @@ added: v2.0.0 The `process.getegid()` method returns the numerical effective group identity of the Node.js process. (See getegid(2).) -```js +```mjs +import process from 'process'; + +if (process.getegid) { + console.log(`Current gid: ${process.getegid()}`); +} +``` + +```cjs +const process = require('process'); + if (process.getegid) { console.log(`Current gid: ${process.getegid()}`); } @@ -1342,7 +1810,17 @@ added: v2.0.0 The `process.geteuid()` method returns the numerical effective user identity of the process. (See geteuid(2).) -```js +```mjs +import process from 'process'; + +if (process.geteuid) { + console.log(`Current uid: ${process.geteuid()}`); +} +``` + +```cjs +const process = require('process'); + if (process.geteuid) { console.log(`Current uid: ${process.geteuid()}`); } @@ -1361,7 +1839,17 @@ added: v0.1.31 The `process.getgid()` method returns the numerical group identity of the process. (See getgid(2).) -```js +```mjs +import process from 'process'; + +if (process.getgid) { + console.log(`Current gid: ${process.getgid()}`); +} +``` + +```cjs +const process = require('process'); + if (process.getgid) { console.log(`Current gid: ${process.getgid()}`); } @@ -1381,7 +1869,17 @@ The `process.getgroups()` method returns an array with the supplementary group IDs. POSIX leaves it unspecified if the effective group ID is included but Node.js ensures it always is. 
-```js +```mjs +import process from 'process'; + +if (process.getgroups) { + console.log(process.getgroups()); // [ 16, 21, 297 ] +} +``` + +```cjs +const process = require('process'); + if (process.getgroups) { console.log(process.getgroups()); // [ 16, 21, 297 ] } @@ -1400,7 +1898,17 @@ added: v0.1.28 The `process.getuid()` method returns the numeric user identity of the process. (See getuid(2).) -```js +```mjs +import process from 'process'; + +if (process.getuid) { + console.log(`Current uid: ${process.getuid()}`); +} +``` + +```cjs +const process = require('process'); + if (process.getuid) { console.log(`Current uid: ${process.getuid()}`); } @@ -1446,13 +1954,31 @@ These times are relative to an arbitrary time in the past, and not related to the time of day and therefore not subject to clock drift. The primary use is for measuring performance between intervals: -```js +```mjs +import { hrtime } from 'process'; + +const NS_PER_SEC = 1e9; +const time = hrtime(); +// [ 1800216, 25 ] + +setTimeout(() => { + const diff = hrtime(time); + // [ 1, 552 ] + + console.log(`Benchmark took ${diff[0] * NS_PER_SEC + diff[1]} nanoseconds`); + // Benchmark took 1000000552 nanoseconds +}, 1000); +``` + +```cjs +const { hrtime } = require('process'); + const NS_PER_SEC = 1e9; -const time = process.hrtime(); +const time = hrtime(); // [ 1800216, 25 ] setTimeout(() => { - const diff = process.hrtime(time); + const diff = hrtime(time); // [ 1, 552 ] console.log(`Benchmark took ${diff[0] * NS_PER_SEC + diff[1]} nanoseconds`); @@ -1474,12 +2000,29 @@ Unlike [`process.hrtime()`][], it does not support an additional `time` argument since the difference can just be computed directly by subtraction of the two `bigint`s. 
-```js -const start = process.hrtime.bigint(); +```mjs +import { hrtime } from 'process'; + +const start = hrtime.bigint(); // 191051479007711n setTimeout(() => { - const end = process.hrtime.bigint(); + const end = hrtime.bigint(); + // 191052633396993n + + console.log(`Benchmark took ${end - start} nanoseconds`); + // Benchmark took 1154389282 nanoseconds +}, 1000); +``` + +```cjs +const { hrtime } = require('process'); + +const start = hrtime.bigint(); +// 191051479007711n + +setTimeout(() => { + const end = hrtime.bigint(); // 191052633396993n console.log(`Benchmark took ${end - start} nanoseconds`); @@ -1502,12 +2045,24 @@ access or the `CAP_SETGID` capability. Use care when dropping privileges: -```js -console.log(process.getgroups()); // [ 0 ] -process.initgroups('nodeuser', 1000); // switch user -console.log(process.getgroups()); // [ 27, 30, 46, 1000, 0 ] -process.setgid(1000); // drop root gid -console.log(process.getgroups()); // [ 27, 30, 46, 1000 ] +```mjs +import { getgroups, initgroups, setgid } from 'process'; + +console.log(getgroups()); // [ 0 ] +initgroups('nodeuser', 1000); // switch user +console.log(getgroups()); // [ 27, 30, 46, 1000, 0 ] +setgid(1000); // drop root gid +console.log(getgroups()); // [ 27, 30, 46, 1000 ] +``` + +```cjs +const { getgroups, initgroups, setgid } = require('process'); + +console.log(getgroups()); // [ 0 ] +initgroups('nodeuser', 1000); // switch user +console.log(getgroups()); // [ 27, 30, 46, 1000, 0 ] +setgid(1000); // drop root gid +console.log(getgroups()); // [ 27, 30, 46, 1000 ] ``` This function is only available on POSIX platforms (i.e. not Windows or @@ -1538,7 +2093,24 @@ Even though the name of this function is `process.kill()`, it is really just a signal sender, like the `kill` system call. The signal sent may do something other than kill the target process. 
-```js +```mjs +import process, { kill } from 'process'; + +process.on('SIGHUP', () => { + console.log('Got SIGHUP signal.'); +}); + +setTimeout(() => { + console.log('Exiting.'); + process.exit(0); +}, 100); + +kill(process.pid, 'SIGHUP'); +``` + +```cjs +const process = require('process'); + process.on('SIGHUP', () => { console.log('Got SIGHUP signal.'); }); @@ -1594,11 +2166,27 @@ changes: * `external` {integer} * `arrayBuffers` {integer} -Returns an object describing the memory usage of the Node.js process measured in -bytes. +Returns an object describing the memory usage of the Node.js process measured in +bytes. + +```mjs +import { memoryUsage } from 'process'; + +console.log(memoryUsage()); +// Prints: +// { +// rss: 4935680, +// heapTotal: 1826816, +// heapUsed: 650472, +// external: 49879, +// arrayBuffers: 9386 +// } +``` + +```cjs +const { memoryUsage } = require('process'); -```js -console.log(process.memoryUsage()); +console.log(memoryUsage()); // Prints: // { // rss: 4935680, @@ -1645,8 +2233,17 @@ process, including all C++ and JavaScript objects and code. This is the same value as the `rss` property provided by `process.memoryUsage()` but `process.memoryUsage.rss()` is faster. -```js -console.log(process.memoryUsage.rss()); +```mjs +import { memoryUsage } from 'process'; + +console.log(memoryUsage.rss()); +// 35655680 +``` + +```cjs +const { rss } = require('process'); + +console.log(memoryUsage.rss()); // 35655680 ``` @@ -1668,9 +2265,25 @@ completion and before the event loop is allowed to continue. It's possible to create an infinite loop if one were to recursively call `process.nextTick()`. See the [Event Loop][] guide for more background. 
-```js +```mjs +import { nextTick } from 'process'; + +console.log('start'); +nextTick(() => { + console.log('nextTick callback'); +}); +console.log('scheduled'); +// Output: +// start +// scheduled +// nextTick callback +``` + +```cjs +const { nextTick } = require('process'); + console.log('start'); -process.nextTick(() => { +nextTick(() => { console.log('nextTick callback'); }); console.log('scheduled'); @@ -1684,11 +2297,30 @@ This is important when developing APIs in order to give users the opportunity to assign event handlers *after* an object has been constructed but before any I/O has occurred: -```js +```mjs +import { nextTick } from 'process'; + +function MyThing(options) { + this.setupOptions(options); + + nextTick(() => { + this.startDoingStuff(); + }); +} + +const thing = new MyThing(); +thing.getReadyForStuff(); + +// thing.startDoingStuff() gets called now, not before. +``` + +```cjs +const { nextTick } = require('process'); + function MyThing(options) { this.setupOptions(options); - process.nextTick(() => { + nextTick(() => { this.startDoingStuff(); }); } @@ -1730,10 +2362,25 @@ It is not clear whether `foo()` or `bar()` will be called first. The following approach is much better: -```js +```mjs +import { nextTick } from 'process'; + +function definitelyAsync(arg, cb) { + if (arg) { + nextTick(cb); + return; + } + + fs.stat('file', cb); +} +``` + +```cjs +const { nextTick } = require('process'); + function definitelyAsync(arg, cb) { if (arg) { - process.nextTick(cb); + nextTick(cb); return; } @@ -1749,10 +2396,24 @@ execute the then, catch, and finally handlers of resolved promises. Within Node.js, every time the "next tick queue" is drained, the microtask queue is drained immediately after. 
-```js +```mjs +import { nextTick } from 'process'; + +Promise.resolve().then(() => console.log(2)); +queueMicrotask(() => console.log(3)); +nextTick(() => console.log(1)); +// Output: +// 1 +// 2 +// 3 +``` + +```cjs +const { nextTick } = require('process'); + Promise.resolve().then(() => console.log(2)); queueMicrotask(() => console.log(3)); -process.nextTick(() => console.log(1)); +nextTick(() => console.log(1)); // Output: // 1 // 2 @@ -1827,8 +2488,16 @@ added: v0.1.15 The `process.pid` property returns the PID of the process. -```js -console.log(`This process is pid ${process.pid}`); +```mjs +import { pid } from 'process'; + +console.log(`This process is pid ${pid}`); +``` + +```cjs +const { pid } = require('process'); + +console.log(`This process is pid ${pid}`); ``` ## `process.platform` @@ -1851,8 +2520,16 @@ Currently possible values are: * `'sunos'` * `'win32'` -```js -console.log(`This platform is ${process.platform}`); +```mjs +import { platform } from 'process'; + +console.log(`This platform is ${platform}`); +``` + +```cjs +const { platform } = require('process'); + +console.log(`This platform is ${platform}`); ``` The value `'android'` may also be returned if the Node.js is built on the @@ -1872,8 +2549,16 @@ added: The `process.ppid` property returns the PID of the parent of the current process. -```js -console.log(`The parent process is pid ${process.ppid}`); +```mjs +import { ppid } from 'process'; + +console.log(`The parent process is pid ${ppid}`); +``` + +```cjs +const { ppid } = require('process'); + +console.log(`The parent process is pid ${ppid}`); ``` ## `process.release` @@ -1958,8 +2643,16 @@ Write reports in a compact format, single-line JSON, more easily consumable by log processing systems than the default multi-line format designed for human consumption. -```js -console.log(`Reports are compact? ${process.report.compact}`); +```mjs +import { report } from 'process'; + +console.log(`Reports are compact? 
${report.compact}`); +``` + +```cjs +const { report } = require('process'); + +console.log(`Reports are compact? ${report.compact}`); ``` ### `process.report.directory` @@ -1979,8 +2672,16 @@ Directory where the report is written. The default value is the empty string, indicating that reports are written to the current working directory of the Node.js process. -```js -console.log(`Report directory is ${process.report.directory}`); +```mjs +import { report } from 'process'; + +console.log(`Report directory is ${report.directory}`); +``` + +```cjs +const { report } = require('process'); + +console.log(`Report directory is ${report.directory}`); ``` ### `process.report.filename` @@ -2000,8 +2701,16 @@ Filename where the report is written. If set to the empty string, the output filename will be comprised of a timestamp, PID, and sequence number. The default value is the empty string. -```js -console.log(`Report filename is ${process.report.filename}`); +```mjs +import { report } from 'process'; + +console.log(`Report filename is ${report.filename}`); +``` + +```cjs +const { report } = require('process'); + +console.log(`Report filename is ${report.filename}`); ``` ### `process.report.getReport([err])` @@ -2022,8 +2731,21 @@ Returns a JavaScript Object representation of a diagnostic report for the running process. The report's JavaScript stack trace is taken from `err`, if present. 
-```js -const data = process.report.getReport(); +```mjs +import { report } from 'process'; + +const data = report.getReport(); +console.log(data.header.nodejsVersion); + +// Similar to process.report.writeReport() +import fs from 'fs'; +fs.writeFileSync('my-report.log', util.inspect(data), 'utf8'); +``` + +```cjs +const { report } = require('process'); + +const data = report.getReport(); console.log(data.header.nodejsVersion); // Similar to process.report.writeReport() @@ -2048,8 +2770,16 @@ changes: If `true`, a diagnostic report is generated on fatal errors, such as out of memory errors or failed C++ assertions. -```js -console.log(`Report on fatal error: ${process.report.reportOnFatalError}`); +```mjs +import { report } from 'process'; + +console.log(`Report on fatal error: ${report.reportOnFatalError}`); +``` + +```cjs +const { report } = require('process'); + +console.log(`Report on fatal error: ${report.reportOnFatalError}`); ``` ### `process.report.reportOnSignal` @@ -2068,8 +2798,16 @@ changes: If `true`, a diagnostic report is generated when the process receives the signal specified by `process.report.signal`. -```js -console.log(`Report on signal: ${process.report.reportOnSignal}`); +```mjs +import { report } from 'process'; + +console.log(`Report on signal: ${report.reportOnSignal}`); +``` + +```cjs +const { report } = require('process'); + +console.log(`Report on signal: ${report.reportOnSignal}`); ``` ### `process.report.reportOnUncaughtException` @@ -2087,8 +2825,16 @@ changes: If `true`, a diagnostic report is generated on uncaught exception. 
-```js -console.log(`Report on exception: ${process.report.reportOnUncaughtException}`); +```mjs +import { report } from 'process'; + +console.log(`Report on exception: ${report.reportOnUncaughtException}`); +``` + +```cjs +const { report } = require('process'); + +console.log(`Report on exception: ${report.reportOnUncaughtException}`); ``` ### `process.report.signal` @@ -2107,8 +2853,16 @@ changes: The signal used to trigger the creation of a diagnostic report. Defaults to `'SIGUSR2'`. -```js -console.log(`Report signal: ${process.report.signal}`); +```mjs +import { report } from 'process'; + +console.log(`Report signal: ${report.signal}`); +``` + +```cjs +const { report } = require('process'); + +console.log(`Report signal: ${report.signal}`); ``` ### `process.report.writeReport([filename][, err])` @@ -2134,8 +2888,16 @@ Writes a diagnostic report to a file. If `filename` is not provided, the default filename includes the date, time, PID, and a sequence number. The report's JavaScript stack trace is taken from `err`, if present. -```js -process.report.writeReport(); +```mjs +import { report } from 'process'; + +report.writeReport(); +``` + +```cjs +const { report } = require('process'); + +report.writeReport(); ``` Additional documentation is available in the [report documentation][]. @@ -2188,8 +2950,37 @@ added: v12.6.0 process becoming runnable or because the current process exceeded its time slice. This field is not supported on Windows. 
-```js -console.log(process.resourceUsage()); +```mjs +import { resourceUsage } from 'process'; + +console.log(resourceUsage()); +/* + Will output: + { + userCPUTime: 82872, + systemCPUTime: 4143, + maxRSS: 33164, + sharedMemorySize: 0, + unsharedDataSize: 0, + unsharedStackSize: 0, + minorPageFault: 2469, + majorPageFault: 0, + swappedOut: 0, + fsRead: 0, + fsWrite: 8, + ipcSent: 0, + ipcReceived: 0, + signalsCount: 0, + voluntaryContextSwitches: 79, + involuntaryContextSwitches: 1 + } +*/ +``` + +```cjs +const { resourceUsage } = require('process'); + +console.log(resourceUsage()); /* Will output: { @@ -2250,7 +3041,23 @@ The `process.setegid()` method sets the effective group identity of the process. name string. If a group name is specified, this method blocks while resolving the associated a numeric ID. -```js +```mjs +import process from 'process'; + +if (process.getegid && process.setegid) { + console.log(`Current gid: ${process.getegid()}`); + try { + process.setegid(501); + console.log(`New gid: ${process.getegid()}`); + } catch (err) { + console.log(`Failed to set gid: ${err}`); + } +} +``` + +```cjs +const process = require('process'); + if (process.getegid && process.setegid) { console.log(`Current gid: ${process.getegid()}`); try { @@ -2278,7 +3085,23 @@ The `process.seteuid()` method sets the effective user identity of the process. string. If a username is specified, the method blocks while resolving the associated numeric ID. -```js +```mjs +import process from 'process'; + +if (process.geteuid && process.seteuid) { + console.log(`Current uid: ${process.geteuid()}`); + try { + process.seteuid(501); + console.log(`New uid: ${process.geteuid()}`); + } catch (err) { + console.log(`Failed to set uid: ${err}`); + } +} +``` + +```cjs +const process = require('process'); + if (process.geteuid && process.seteuid) { console.log(`Current uid: ${process.geteuid()}`); try { @@ -2306,7 +3129,23 @@ setgid(2).) 
The `id` can be passed as either a numeric ID or a group name string. If a group name is specified, this method blocks while resolving the associated numeric ID. -```js +```mjs +import process from 'process'; + +if (process.getgid && process.setgid) { + console.log(`Current gid: ${process.getgid()}`); + try { + process.setgid(501); + console.log(`New gid: ${process.getgid()}`); + } catch (err) { + console.log(`Failed to set gid: ${err}`); + } +} +``` + +```cjs +const process = require('process'); + if (process.getgid && process.setgid) { console.log(`Current gid: ${process.getgid()}`); try { @@ -2335,7 +3174,22 @@ process to have `root` or the `CAP_SETGID` capability. The `groups` array can contain numeric group IDs, group names, or both. -```js +```mjs +import process from 'process'; + +if (process.getgroups && process.setgroups) { + try { + process.setgroups([501]); + console.log(process.getgroups()); // new groups + } catch (err) { + console.log(`Failed to set groups: ${err}`); + } +} +``` + +```cjs +const process = require('process'); + if (process.getgroups && process.setgroups) { try { process.setgroups([501]); @@ -2362,7 +3216,23 @@ setuid(2).) The `id` can be passed as either a numeric ID or a username string. If a username is specified, the method blocks while resolving the associated numeric ID. -```js +```mjs +import process from 'process'; + +if (process.getuid && process.setuid) { + console.log(`Current uid: ${process.getuid()}`); + try { + process.setuid(501); + console.log(`New uid: ${process.getuid()}`); + } catch (err) { + console.log(`Failed to set uid: ${err}`); + } +} +``` + +```cjs +const process = require('process'); + if (process.getuid && process.setuid) { console.log(`Current uid: ${process.getuid()}`); try { @@ -2461,8 +3331,16 @@ a [Writable][] stream. 
For example, to copy `process.stdin` to `process.stdout`: -```js -process.stdin.pipe(process.stdout); +```mjs +import { stdin, stdout } from 'process'; + +stdin.pipe(stdout); +``` + +```cjs +const { stdin, stdout } = require('process'); + +stdin.pipe(stdout); ``` `process.stdout` differs from other Node.js streams in important ways. See @@ -2621,9 +3499,21 @@ added: v0.1.19 `process.umask(mask)` sets the Node.js process's file mode creation mask. Child processes inherit the mask from the parent process. Returns the previous mask. -```js +```mjs +import { umask } from 'process'; + +const newmask = 0o022; +const oldmask = umask(newmask); +console.log( + `Changed umask from ${oldmask.toString(8)} to ${newmask.toString(8)}` +); +``` + +```cjs +const { umask } = require('process'); + const newmask = 0o022; -const oldmask = process.umask(newmask); +const oldmask = umask(newmask); console.log( `Changed umask from ${oldmask.toString(8)} to ${newmask.toString(8)}` ); @@ -2653,8 +3543,17 @@ added: v0.1.3 The `process.version` property contains the Node.js version string. -```js -console.log(`Version: ${process.version}`); +```mjs +import { version } from 'process'; + +console.log(`Version: ${version}`); +// Version: v14.8.0 +``` + +```cjs +const { version } = require('process'); + +console.log(`Version: ${version}`); // Version: v14.8.0 ``` @@ -2680,8 +3579,16 @@ Node.js and its dependencies. `process.versions.modules` indicates the current ABI version, which is increased whenever a C++ API changes. Node.js will refuse to load modules that were compiled against a different module ABI version. 
-```js -console.log(process.versions); +```mjs +import { versions } from 'process'; + +console.log(versions); +``` + +```cjs +const { versions } = require('process'); + +console.log(versions); ``` Will generate an object similar to: diff --git a/doc/api/wasi.md b/doc/api/wasi.md index d5213b9c9eb460..58c310edd82585 100644 --- a/doc/api/wasi.md +++ b/doc/api/wasi.md @@ -13,10 +13,11 @@ underlying operating system via a collection of POSIX-like functions. ```mjs import fs from 'fs'; import { WASI } from 'wasi'; +import { argv, env } from 'process'; const wasi = new WASI({ - args: process.argv, - env: process.env, + args: argv, + env, preopens: { '/sandbox': '/some/real/path/that/wasm/can/access' } @@ -33,9 +34,11 @@ wasi.start(instance); 'use strict'; const fs = require('fs'); const { WASI } = require('wasi'); +const { argv, env } = require('process'); + const wasi = new WASI({ - args: process.argv, - env: process.env, + args: argv, + env, preopens: { '/sandbox': '/some/real/path/that/wasm/can/access' } From 48d9680f849a87b49784c71c5552319f4d9d582b Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Fri, 18 Jun 2021 18:52:37 -0700 Subject: [PATCH 009/133] debugger: remove final lint exceptions in inspect_repl.js MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adding a return when it's not really a getter is kind of misleading, but so is using a getter for something that doesn't return anything, so ¯\_(ツ)_/¯. 
PR-URL: https://github.com/nodejs/node/pull/39078 Reviewed-By: Antoine du Hamel --- lib/internal/debugger/inspect_repl.js | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/lib/internal/debugger/inspect_repl.js b/lib/internal/debugger/inspect_repl.js index 5393cb32718300..a93a8dfecf2720 100644 --- a/lib/internal/debugger/inspect_repl.js +++ b/lib/internal/debugger/inspect_repl.js @@ -1,6 +1,3 @@ -// TODO(trott): enable ESLint -/* eslint-disable getter-return */ - 'use strict'; const { @@ -899,7 +896,7 @@ function createRepl(inspector) { copyOwnProperties(context, { get help() { - print(HELP); + return print(HELP); }, get run() { @@ -1078,7 +1075,7 @@ function createRepl(inspector) { repl.setPrompt('> '); print('Press Ctrl+C to leave debug repl'); - repl.displayPrompt(); + return repl.displayPrompt(); }, get version() { From 90ec7660bc3c8b1625db75fa78fccc64f282b9c0 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Fri, 18 Jun 2021 23:15:50 -0700 Subject: [PATCH 010/133] doc: update AUTHORS file MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/39082 Reviewed-By: Michaël Zasso Reviewed-By: Nitzan Uziely --- .mailmap | 8 +++ AUTHORS | 182 ++++++++++++++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 188 insertions(+), 2 deletions(-) diff --git a/.mailmap b/.mailmap index 4597ae9e47f61d..c63bd511e6b5d9 100644 --- a/.mailmap +++ b/.mailmap @@ -63,6 +63,7 @@ Beth Griggs Bethany Griggs Beth Griggs Bethany N Griggs Beth Griggs BethGriggs Bidisha Pyne +bl-ue bl-ue <54780737+bl-ue@users.noreply.github.com> Brad Decker brad-decker Brad Larson BradLarson Bradley Meck Bradley Farias @@ -103,8 +104,10 @@ Daniel Paulino dpaulino Daniel Pihlström Daniel Wang firedfox Daniel Wang firedfox +Danielle Adams Danielle Adams Danny Nemer Danny Nemer +Darshan Sen Dave Pacheco David Cai DavidCai David Mark Clements @@ -207,6 +210,7 @@ Josh Erickson Josh Hunter jopann Joshua S. 
Weinstein Joyee Cheung joyeecheung +Joyee Cheung Joyee Cheung Juan Soto Julien Klepatch jklepatch Julien Waechter julien.waechter @@ -294,6 +298,7 @@ Nicholas Kinsey Nick Soggin Nikolai Vavilov Nils Kuhnhenn +Nitzan Uziely Nitzan Uziely Noah Rose Ledesma Noah Rose Noah Rose Ledesma Oluwaseun Omoyajowo @@ -356,6 +361,7 @@ Segu Riluvan Sergey Kryzhanovsky Shannen Saez Shaopeng Zhang szhang351 +Shelley Vohr Shelley Vohr Shigeki Ohtsu Shigeki Ohtsu Shivang Saxena @@ -371,6 +377,7 @@ Sreepurna Jasti sreepurnajasti Stefan Budeanu Stefan Bühler +Stephen Belanger Stephen Belanger Steve Mao Steven R. Loomis @@ -426,6 +433,7 @@ Ujjwal Sharma Viktor Karpov vitkarpov Vincent Voyer Vladimir de Turckheim +Voltrex Voltrex <62040526+VoltrexMaster@users.noreply.github.com> vsemozhetbyt Vse Mozhet Byt Wang Xinyong Weijia Wang <381152119@qq.com> diff --git a/AUTHORS b/AUTHORS index a61c4010be5d3c..85c715a5838214 100644 --- a/AUTHORS +++ b/AUTHORS @@ -784,7 +784,7 @@ Sven Slootweg Dmitry Vasilyev Malcolm Ahoy Imran Iqbal -Stewart X Addison +Stewart X Addison Matt Harrison Christopher J. 
Brody Salman Aljammaz @@ -3003,7 +3003,7 @@ Conor ONeill tsabolov Swagat Konchada Yuhanun Citgez -Danielle Adams +Danielle Adams Andrey Pechkurov Jeff simon @@ -3139,5 +3139,183 @@ Shigma <33423008+Shigma@users.noreply.github.com> atian25@qq.com Amila Welihinda schamberg97 <50446906+schamberg97@users.noreply.github.com> +DrunkenPoney +Christoph Tavan +Clark Kozak +Michael Auderer +Linn Dahlgren +Ikko Ashimine +Anatoly Korniltsev +Victor Antonio Barzana Crespo +Matthieu Larcher +anlex N <1293006794@qq.com> +ThakurKarthik +Aastha Gupta +Yohanan Baruchel +Dmitry Gozman +Daniil Demidovich +Hussaina Begum Nandyala +Danny Sonnenschein +Sourav Shaw +H Adinarayana +lucasg +Brian 'bdougie' Douglas +Lee, Bonggi +Momtchil Momtchev +Josh Dague +Vincent Boivin +ax1 <16510021+ax1@users.noreply.github.com> +Shubham Parihar <51517103+iShibi@users.noreply.github.com> +Darshan Sen +Matthew Francis Brunetti +Chris Opperwall +Takuya Noguchi +tyankatsu +Ben Turner <7623873+ben-turner@users.noreply.github.com> +Bryan Field +krank2me +masx200 <34191203+masx200@users.noreply.github.com> +Baruch Odem (Rothkoff) +Mattias Runge-Broberg +Szymon Marczak <36894700+szmarczak@users.noreply.github.com> +Dmitry Semigradsky +Ole André Vadla Ravnås +rickyes <0x19951125@gmail.com> +Aleksandr Krutko +Brian Ingenito <28159742+bingenito@users.noreply.github.com> +FeelyChau +Darcy Clarke +mayank agarwal +woodfairy +Nikola Glavina +Rishabh Mehan +Anna Henningsen +Andrew Casey +Anders Kaseorg +Hollow Man +nlf +naortedgi +Narasimha Prasanna HN +Zijian Liu +inokawa <48897392+inokawa@users.noreply.github.com> +Michael Bashurov +Moshe vilner +Nicolai Stange +kai zhu +FrankQiu +Rock +Chinmoy Chakraborty +Maksym Baranovskyi +Michael Chen <4326639+mcgitty@users.noreply.github.com> +François-Denis Gonthier +Dr +Nitzan Uziely +Adrien Maret +Thiago Padilha +Joseph Hackman +Pranshu Jethmalani +Rohan Chougule +Mohamed Kamagate +Ajay Poshak +Isaac Levy +ugultopu +Nicholas Schamberg +Dimitris Halatsis +Mattia Pontonio 
<44380480+mattiapontonio@users.noreply.github.com> +Milad Fa +Emil Sivervik +alexbs +Ian Storm Taylor +Carlos Fuentes +Tyler Ang-Wanek +Matthew Mario Di Pasquale +ttzztztz +Romuald Brillout +Dave Cardwell +Akash Negi <55234838+NegiAkash890@users.noreply.github.com> +James Addison +Fabian Cook +Kalvin Vasconcellos +marsonya +Qingyu Deng +Matin Zadehdolatabad +Daniel Clark +Sajal Khandelwal +Cheng Liu +Utku Gultopu +Jay Tailor <60511316+JayvaScript@users.noreply.github.com> +Greg Ziskind +Dan Čermák +ttzztztz +Vít Ondruch +humanwebpl <58517331+humanwebpl@users.noreply.github.com> +Dawid Rusnak +obi-el +Merlin Luntke <22600241+Luntke@users.noreply.github.com> +Marko Kaznovac +Gabriel Schulhof +Ian Kerins +dbachko +Mattias Buelens +Dylan Elliott +Wassim Chegham +simov +wwwzbwcom +David Glasser +pezhmanparsaee +Hassaan Pasha +Darkripper214 +Anu Pasumarthy +HiroyukiYagihashi +Arkerone +Voltrex +ycjcl868 +Serkan Özel +Ferdi +eladkeyshawn +luyahan +Simon Knott +Siddharth +Cactysman +David Brownman +Michael Rommel +Chengzhong Wu +Andres +Jayden Seric +divlo +Rohit Gohri +Giora Guttsait +takayama +Rafael Gonzaga +Arnold Zokas +Nils Dralle +Jesse Chan +helloyou2012 +MrJithil +Rodolfo Carvalho +Jordan Baczuk +moander +Hitesh Sharma +Andreas Schwab +Moritz Kneilmann +fisker Cheung +Issam E. 
Maghni +TodorTotev <51530311+TodorTotev@users.noreply.github.com> +Wael Almattar +yotamselementor <83912471+yotamselementor@users.noreply.github.com> +pengjie <37610029@qq.com> +Philip +julianjany <54538266+julianjany@users.noreply.github.com> +bl-ue +npm-robot +Shaun Keys +Simone Busoli +ycjcl868 <45808948@qq.com> +Qingyu Deng +Derevianchenko Maksym <32910350+maks-white@users.noreply.github.com> +RA80533 <32469082+RA80533@users.noreply.github.com> +Mao Wtm +Houssem Chebab +Davidson Francis # Generated by tools/update-authors.js From ea8d83bf596c1285ed6288281f6fb1bb0f0a6f75 Mon Sep 17 00:00:00 2001 From: XadillaX Date: Thu, 3 Jun 2021 14:27:32 +0800 Subject: [PATCH 011/133] src,crypto: fix 0-length output crash in webcrypto MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes: https://github.com/nodejs/node/issues/38883 PR-URL: https://github.com/nodejs/node/pull/38913 Refs: https://github.com/nodejs/node/issues/38883 Reviewed-By: Tobias Nießen --- src/crypto/crypto_aes.cc | 12 +++++- src/crypto/crypto_cipher.h | 9 +++-- .../test-crypto-subtle-zero-length.js | 39 +++++++++++++++++++ 3 files changed, 55 insertions(+), 5 deletions(-) create mode 100644 test/parallel/test-crypto-subtle-zero-length.js diff --git a/src/crypto/crypto_aes.cc b/src/crypto/crypto_aes.cc index edca8fbd0dcb5c..d6f6393df6c878 100644 --- a/src/crypto/crypto_aes.cc +++ b/src/crypto/crypto_aes.cc @@ -130,7 +130,17 @@ WebCryptoCipherStatus AES_Cipher( ByteSource buf = ByteSource::Allocated(data, buf_len); unsigned char* ptr = reinterpret_cast(data); - if (!EVP_CipherUpdate( + // In some outdated version of OpenSSL (e.g. + // ubi81_sharedlibs_openssl111fips_x64) may be used in sharedlib mode, the + // logic will be failed when input size is zero. The newly OpenSSL has fixed + // it up. But we still have to regard zero as special in Node.js code to + // prevent old OpenSSL failure. 
+ // + // Refs: https://github.com/openssl/openssl/commit/420cb707b880e4fb649094241371701013eeb15f + // Refs: https://github.com/nodejs/node/pull/38913#issuecomment-866505244 + if (in.size() == 0) { + out_len = 0; + } else if (!EVP_CipherUpdate( ctx.get(), ptr, &out_len, diff --git a/src/crypto/crypto_cipher.h b/src/crypto/crypto_cipher.h index c8dd3e48f718fd..b9b850a1d64c8b 100644 --- a/src/crypto/crypto_cipher.h +++ b/src/crypto/crypto_cipher.h @@ -249,16 +249,17 @@ class CipherJob final : public CryptoJob { v8::Local* result) override { Environment* env = AsyncWrap::env(); CryptoErrorStore* errors = CryptoJob::errors(); - if (out_.size() > 0) { + + if (errors->Empty()) + errors->Capture(); + + if (out_.size() > 0 || errors->Empty()) { CHECK(errors->Empty()); *err = v8::Undefined(env->isolate()); *result = out_.ToArrayBuffer(env); return v8::Just(!result->IsEmpty()); } - if (errors->Empty()) - errors->Capture(); - CHECK(!errors->Empty()); *result = v8::Undefined(env->isolate()); return v8::Just(errors->ToException(env).ToLocal(err)); } diff --git a/test/parallel/test-crypto-subtle-zero-length.js b/test/parallel/test-crypto-subtle-zero-length.js new file mode 100644 index 00000000000000..ffca84cf56129e --- /dev/null +++ b/test/parallel/test-crypto-subtle-zero-length.js @@ -0,0 +1,39 @@ +'use strict'; + +const common = require('../common'); + +if (!common.hasCrypto) + common.skip('missing crypto'); + +const assert = require('assert'); +const crypto = require('crypto').webcrypto; + +(async () => { + const k = await crypto.subtle.importKey( + 'raw', + new Uint8Array(32), + { name: 'AES-GCM' }, + false, + [ 'encrypt', 'decrypt' ]); + assert(k instanceof crypto.CryptoKey); + + const e = await crypto.subtle.encrypt({ + name: 'AES-GCM', + iv: new Uint8Array(12), + }, k, new Uint8Array(0)); + assert(e instanceof ArrayBuffer); + assert.deepStrictEqual( + Buffer.from(e), + Buffer.from([ + 0x53, 0x0f, 0x8a, 0xfb, 0xc7, 0x45, 0x36, 0xb9, + 0xa9, 0x63, 0xb4, 0xf1, 0xc4, 0xcb, 
0x73, 0x8b ])); + + const v = await crypto.subtle.decrypt({ + name: 'AES-GCM', + iv: new Uint8Array(12), + }, k, e); + assert(v instanceof ArrayBuffer); + assert.strictEqual(v.byteLength, 0); +})().then(common.mustCall()).catch((e) => { + assert.ifError(e); +}); From 35331cbd1350a64b5ec5daf1e3ae7e096b7e6bd3 Mon Sep 17 00:00:00 2001 From: Qingyu Deng Date: Thu, 17 Jun 2021 22:29:03 +0800 Subject: [PATCH 012/133] http,https: align server option of https with http MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes: https://github.com/nodejs/node/issues/38954 PR-URL: https://github.com/nodejs/node/pull/38992 Refs: https://github.com/nodejs/node/pull/30570 Reviewed-By: Anna Henningsen Reviewed-By: Michaël Zasso --- lib/_http_server.js | 28 ++-- lib/https.js | 8 +- .../test-https-insecure-parse-per-stream.js | 131 ++++++++++++++++++ .../test-https-max-header-size-per-stream.js | 119 ++++++++++++++++ 4 files changed, 268 insertions(+), 18 deletions(-) create mode 100644 test/parallel/test-https-insecure-parse-per-stream.js create mode 100644 test/parallel/test-https-max-header-size-per-stream.js diff --git a/lib/_http_server.js b/lib/_http_server.js index 97df58a007daba..11119169d56f2c 100644 --- a/lib/_http_server.js +++ b/lib/_http_server.js @@ -353,18 +353,7 @@ function writeHead(statusCode, reason, obj) { // Docs-only deprecated: DEP0063 ServerResponse.prototype.writeHeader = ServerResponse.prototype.writeHead; -function Server(options, requestListener) { - if (!(this instanceof Server)) return new Server(options, requestListener); - - if (typeof options === 'function') { - requestListener = options; - options = {}; - } else if (options == null || typeof options === 'object') { - options = { ...options }; - } else { - throw new ERR_INVALID_ARG_TYPE('options', 'object', options); - } - +function storeHTTPOptions(options) { this[kIncomingMessage] = options.IncomingMessage || IncomingMessage; this[kServerResponse] = 
options.ServerResponse || ServerResponse; @@ -377,7 +366,21 @@ function Server(options, requestListener) { if (insecureHTTPParser !== undefined) validateBoolean(insecureHTTPParser, 'options.insecureHTTPParser'); this.insecureHTTPParser = insecureHTTPParser; +} + +function Server(options, requestListener) { + if (!(this instanceof Server)) return new Server(options, requestListener); + + if (typeof options === 'function') { + requestListener = options; + options = {}; + } else if (options == null || typeof options === 'object') { + options = { ...options }; + } else { + throw new ERR_INVALID_ARG_TYPE('options', 'object', options); + } + storeHTTPOptions.call(this, options); net.Server.call(this, { allowHalfOpen: true }); if (requestListener) { @@ -991,6 +994,7 @@ module.exports = { STATUS_CODES, Server, ServerResponse, + storeHTTPOptions, _connectionListener: connectionListener, kServerResponse }; diff --git a/lib/https.js b/lib/https.js index 765e1a22b60696..695a9020994852 100644 --- a/lib/https.js +++ b/lib/https.js @@ -40,16 +40,14 @@ const tls = require('tls'); const { Agent: HttpAgent } = require('_http_agent'); const { Server: HttpServer, + storeHTTPOptions, _connectionListener, - kServerResponse } = require('_http_server'); const { ClientRequest } = require('_http_client'); let debug = require('internal/util/debuglog').debuglog('https', (fn) => { debug = fn; }); const { URL, urlToHttpOptions, searchParamsSymbol } = require('internal/url'); -const { IncomingMessage, ServerResponse } = require('http'); -const { kIncomingMessage } = require('_http_common'); function Server(opts, requestListener) { if (!(this instanceof Server)) return new Server(opts, requestListener); @@ -67,9 +65,7 @@ function Server(opts, requestListener) { opts.ALPNProtocols = ['http/1.1']; } - this[kIncomingMessage] = opts.IncomingMessage || IncomingMessage; - this[kServerResponse] = opts.ServerResponse || ServerResponse; - + FunctionPrototypeCall(storeHTTPOptions, this, opts); 
FunctionPrototypeCall(tls.Server, this, opts, _connectionListener); this.httpAllowHalfOpen = false; diff --git a/test/parallel/test-https-insecure-parse-per-stream.js b/test/parallel/test-https-insecure-parse-per-stream.js new file mode 100644 index 00000000000000..645fbcf2637654 --- /dev/null +++ b/test/parallel/test-https-insecure-parse-per-stream.js @@ -0,0 +1,131 @@ +'use strict'; +const common = require('../common'); +if (!common.hasCrypto) { + common.skip('missing crypto'); +} + +const fixtures = require('../common/fixtures'); +const assert = require('assert'); +const https = require('https'); +const MakeDuplexPair = require('../common/duplexpair'); +const tls = require('tls'); +const { finished } = require('stream'); + +const certFixture = { + key: fixtures.readKey('agent1-key.pem'), + cert: fixtures.readKey('agent1-cert.pem'), + ca: fixtures.readKey('ca1-cert.pem'), +}; + + +// Test that setting the `insecureHTTPParse` option works on a per-stream-basis. + +// Test 1: The server sends an invalid header. +{ + const { clientSide, serverSide } = MakeDuplexPair(); + + const req = https.request({ + rejectUnauthorized: false, + createConnection: common.mustCall(() => clientSide), + insecureHTTPParser: true + }, common.mustCall((res) => { + assert.strictEqual(res.headers.hello, 'foo\x08foo'); + res.resume(); // We don’t actually care about contents. + res.on('end', common.mustCall()); + })); + req.end(); + + serverSide.resume(); // Dump the request + serverSide.end('HTTP/1.1 200 OK\r\n' + + 'Hello: foo\x08foo\r\n' + + 'Content-Length: 0\r\n' + + '\r\n\r\n'); +} + +// Test 2: The same as Test 1 except without the option, to make sure it fails. 
+{ + const { clientSide, serverSide } = MakeDuplexPair(); + + const req = https.request({ + rejectUnauthorized: false, + createConnection: common.mustCall(() => clientSide) + }, common.mustNotCall()); + req.end(); + req.on('error', common.mustCall()); + + serverSide.resume(); // Dump the request + serverSide.end('HTTP/1.1 200 OK\r\n' + + 'Hello: foo\x08foo\r\n' + + 'Content-Length: 0\r\n' + + '\r\n\r\n'); +} + +// Test 3: The client sends an invalid header. +{ + const testData = 'Hello, World!\n'; + const server = https.createServer( + { insecureHTTPParser: true, + ...certFixture }, + common.mustCall((req, res) => { + res.statusCode = 200; + res.setHeader('Content-Type', 'text/plain'); + res.end(testData); + })); + + server.on('clientError', common.mustNotCall()); + + server.listen(0, common.mustCall(() => { + const client = tls.connect({ + port: server.address().port, + rejectUnauthorized: false + }); + client.write( + 'GET / HTTP/1.1\r\n' + + 'Hello: foo\x08foo\r\n' + + '\r\n\r\n'); + client.end(); + + client.on('data', () => {}); + finished(client, common.mustCall(() => { + server.close(); + })); + })); +} + +// Test 4: The same as Test 3 except without the option, to make sure it fails. +{ + const server = https.createServer( + { ...certFixture }, + common.mustNotCall()); + + server.on('clientError', common.mustCall()); + + server.listen(0, common.mustCall(() => { + const client = tls.connect({ + port: server.address().port, + rejectUnauthorized: false + }); + client.write( + 'GET / HTTP/1.1\r\n' + + 'Hello: foo\x08foo\r\n' + + '\r\n\r\n'); + client.end(); + + client.on('data', () => {}); + finished(client, common.mustCall(() => { + server.close(); + })); + })); +} + +// Test 5: Invalid argument type +{ + assert.throws( + () => https.request({ insecureHTTPParser: 0 }, common.mustNotCall()), + common.expectsError({ + code: 'ERR_INVALID_ARG_TYPE', + message: 'The "options.insecureHTTPParser" property must be of' + + ' type boolean. 
Received type number (0)' + }) + ); +} diff --git a/test/parallel/test-https-max-header-size-per-stream.js b/test/parallel/test-https-max-header-size-per-stream.js new file mode 100644 index 00000000000000..f7117e16fb43f6 --- /dev/null +++ b/test/parallel/test-https-max-header-size-per-stream.js @@ -0,0 +1,119 @@ +'use strict'; +const common = require('../common'); + +if (!common.hasCrypto) { + common.skip('missing crypto'); +} + +const fixtures = require('../common/fixtures'); +const assert = require('assert'); +const https = require('https'); +const http = require('http'); +const tls = require('tls'); +const MakeDuplexPair = require('../common/duplexpair'); +const { finished } = require('stream'); + +const certFixture = { + key: fixtures.readKey('agent1-key.pem'), + cert: fixtures.readKey('agent1-cert.pem'), + ca: fixtures.readKey('ca1-cert.pem'), +}; + + +// Test that setting the `maxHeaderSize` option works on a per-stream-basis. + +// Test 1: The server sends larger headers than what would otherwise be allowed. +{ + const { clientSide, serverSide } = MakeDuplexPair(); + + const req = https.request({ + createConnection: common.mustCall(() => clientSide), + maxHeaderSize: http.maxHeaderSize * 4 + }, common.mustCall((res) => { + assert.strictEqual(res.headers.hello, 'A'.repeat(http.maxHeaderSize * 3)); + res.resume(); // We don’t actually care about contents. + res.on('end', common.mustCall()); + })); + req.end(); + + serverSide.resume(); // Dump the request + serverSide.end('HTTP/1.1 200 OK\r\n' + + 'Hello: ' + 'A'.repeat(http.maxHeaderSize * 3) + '\r\n' + + 'Content-Length: 0\r\n' + + '\r\n\r\n'); +} + +// Test 2: The same as Test 1 except without the option, to make sure it fails. 
+{ + const { clientSide, serverSide } = MakeDuplexPair(); + + const req = https.request({ + createConnection: common.mustCall(() => clientSide) + }, common.mustNotCall()); + req.end(); + req.on('error', common.mustCall()); + + serverSide.resume(); // Dump the request + serverSide.end('HTTP/1.1 200 OK\r\n' + + 'Hello: ' + 'A'.repeat(http.maxHeaderSize * 3) + '\r\n' + + 'Content-Length: 0\r\n' + + '\r\n\r\n'); +} + +// Test 3: The client sends larger headers than what would otherwise be allowed. +{ + const testData = 'Hello, World!\n'; + const server = https.createServer( + { maxHeaderSize: http.maxHeaderSize * 4, + ...certFixture }, + common.mustCall((req, res) => { + res.statusCode = 200; + res.setHeader('Content-Type', 'text/plain'); + res.end(testData); + })); + + server.on('clientError', common.mustNotCall()); + + server.listen(0, common.mustCall(() => { + const client = tls.connect({ + port: server.address().port, + rejectUnauthorized: false + }); + client.write( + 'GET / HTTP/1.1\r\n' + + 'Hello: ' + 'A'.repeat(http.maxHeaderSize * 3) + '\r\n' + + '\r\n\r\n'); + client.end(); + + client.on('data', () => {}); + finished(client, common.mustCall(() => { + server.close(); + })); + })); +} + +// Test 4: The same as Test 3 except without the option, to make sure it fails. +{ + const server = https.createServer({ ...certFixture }, common.mustNotCall()); + + // clientError may be emitted multiple times when header is larger than + // maxHeaderSize. 
+ server.on('clientError', common.mustCallAtLeast(() => {}, 1)); + + server.listen(0, common.mustCall(() => { + const client = tls.connect({ + port: server.address().port, + rejectUnauthorized: false + }); + client.write( + 'GET / HTTP/1.1\r\n' + + 'Hello: ' + 'A'.repeat(http.maxHeaderSize * 3) + '\r\n' + + '\r\n\r\n'); + client.end(); + + client.on('data', () => {}); + finished(client, common.mustCall(() => { + server.close(); + })); + })); +} From df17c6281876b35f3e068b30606580a081d50a19 Mon Sep 17 00:00:00 2001 From: Adam Majer Date: Fri, 11 Jun 2021 20:21:22 +0200 Subject: [PATCH 013/133] test: use localhost test instead of connecting to remote Fixes: https://github.com/nodejs/node/issues/39008 PR-URL: https://github.com/nodejs/node/pull/39011 Reviewed-By: Luigi Pinca Reviewed-By: Darshan Sen Reviewed-By: Zijian Liu --- .../parallel/test-https-agent-unref-socket.js | 24 +++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/test/parallel/test-https-agent-unref-socket.js b/test/parallel/test-https-agent-unref-socket.js index b2863a74d817d4..49169b523e1042 100644 --- a/test/parallel/test-https-agent-unref-socket.js +++ b/test/parallel/test-https-agent-unref-socket.js @@ -6,8 +6,24 @@ if (!common.hasCrypto) const https = require('https'); -const request = https.get('https://example.com'); +if (process.argv[2] === 'localhost') { + const request = https.get('https://localhost:' + process.argv[3]); -request.on('socket', (socket) => { - socket.unref(); -}); + request.on('socket', (socket) => { + socket.unref(); + }); +} else { + const assert = require('assert'); + const net = require('net'); + const server = net.createServer(); + server.listen(0); + server.on('listening', () => { + const port = server.address().port; + const { fork } = require('child_process'); + const child = fork(__filename, ['localhost', port], {}); + child.on('close', (exit_code) => { + server.close(); + assert.strictEqual(exit_code, 0); + }); + }); +} From 
65b56b3774471b95a48cfa0658c3faefe1728970 Mon Sep 17 00:00:00 2001 From: Momtchil Momtchev Date: Sat, 19 Jun 2021 16:52:59 +0200 Subject: [PATCH 014/133] build: fix building with external builtins PR-URL: https://github.com/nodejs/node/pull/39091 Fixes: https://github.com/nodejs/node/issues/39090 Reviewed-By: Anna Henningsen Reviewed-By: Antoine du Hamel Reviewed-By: Colin Ihrig Reviewed-By: Darshan Sen Reviewed-By: Luigi Pinca --- src/node_native_module.cc | 1 + 1 file changed, 1 insertion(+) diff --git a/src/node_native_module.cc b/src/node_native_module.cc index f788732ae569d4..a1aff0a9c74889 100644 --- a/src/node_native_module.cc +++ b/src/node_native_module.cc @@ -1,5 +1,6 @@ #include "node_native_module.h" #include "util-inl.h" +#include "debug_utils-inl.h" namespace node { namespace native_module { From 21e8720155a827079936ef41c516e7727deeb2e9 Mon Sep 17 00:00:00 2001 From: Rohan Sharma Date: Wed, 23 Jun 2021 20:14:56 +0530 Subject: [PATCH 015/133] doc: fix `EventTarget.dispatchEvent` docs PR-URL: https://github.com/nodejs/node/pull/39127 Reviewed-By: Antoine du Hamel Reviewed-By: Benjamin Gruenbaum Reviewed-By: Luigi Pinca --- doc/api/events.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/api/events.md b/doc/api/events.md index ca2ae7e6c84af9..2b0d4ef5ceb6da 100644 --- a/doc/api/events.md +++ b/doc/api/events.md @@ -1484,11 +1484,11 @@ target.removeEventListener('foo', handler, { capture: true }); added: v14.5.0 --> -* `event` {Object|Event} +* `event` {Event} +* Returns: {boolean} `true` if either event’s `cancelable` attribute value is + false or its `preventDefault()` method was not invoked, otherwise `false`. -Dispatches the `event` to the list of handlers for `event.type`. The `event` -may be an `Event` object or any object with a `type` property whose value is -a `string`. +Dispatches the `event` to the list of handlers for `event.type`. 
The registered event listeners is synchronously invoked in the order they were registered. From fe1c81f24733cef7cadfba4da14d6fc841761398 Mon Sep 17 00:00:00 2001 From: Voltrex Date: Fri, 18 Jun 2021 06:39:40 +0430 Subject: [PATCH 016/133] wasi: use missing validator The `wasi` lib module's `initialize()` method is missing a validator. PR-URL: https://github.com/nodejs/node/pull/39070 Reviewed-By: Antoine du Hamel Reviewed-By: Luigi Pinca Reviewed-By: Darshan Sen --- lib/internal/validators.js | 6 ++++++ lib/wasi.js | 17 ++++------------- test/wasi/test-wasi-initialize-validation.js | 3 ++- test/wasi/test-wasi-start-validation.js | 3 ++- 4 files changed, 14 insertions(+), 15 deletions(-) diff --git a/lib/internal/validators.js b/lib/internal/validators.js index 6abe332fc5fdc0..9bca0aa747725e 100644 --- a/lib/internal/validators.js +++ b/lib/internal/validators.js @@ -229,6 +229,11 @@ const validateFunction = hideStackFrames((value, name) => { throw new ERR_INVALID_ARG_TYPE(name, 'Function', value); }); +const validateUndefined = hideStackFrames((value, name) => { + if (value !== undefined) + throw new ERR_INVALID_ARG_TYPE(name, 'undefined', value); +}); + module.exports = { isInt32, isUint32, @@ -247,6 +252,7 @@ module.exports = { validateSignalName, validateString, validateUint32, + validateUndefined, validateCallback, validateAbortSignal, }; diff --git a/lib/wasi.js b/lib/wasi.js index 63209ce716cb9a..43ecf94b4cb050 100644 --- a/lib/wasi.js +++ b/lib/wasi.js @@ -21,6 +21,7 @@ const { validateFunction, validateInt32, validateObject, + validateUndefined, } = require('internal/validators'); const { WASI: _WASI } = internalBinding('wasi'); const kExitCode = Symbol('kExitCode'); @@ -120,10 +121,7 @@ class WASI { const { _start, _initialize } = this[kInstance].exports; validateFunction(_start, 'instance.exports._start'); - if (_initialize !== undefined) { - throw new ERR_INVALID_ARG_TYPE( - 'instance.exports._initialize', 'undefined', _initialize); - } + 
validateUndefined(_initialize, 'instance.exports._initialize'); try { _start(); @@ -147,16 +145,9 @@ class WASI { const { _start, _initialize } = this[kInstance].exports; - if (typeof _initialize !== 'function' && _initialize !== undefined) { - throw new ERR_INVALID_ARG_TYPE( - 'instance.exports._initialize', 'function', _initialize); - } - if (_start !== undefined) { - throw new ERR_INVALID_ARG_TYPE( - 'instance.exports._start', 'undefined', _initialize); - } - + validateUndefined(_start, 'instance.exports._start'); if (_initialize !== undefined) { + validateFunction(_initialize, 'instance.exports._initialize'); _initialize(); } } diff --git a/test/wasi/test-wasi-initialize-validation.js b/test/wasi/test-wasi-initialize-validation.js index 79b0bd8485a483..40dfd864d1874e 100644 --- a/test/wasi/test-wasi-initialize-validation.js +++ b/test/wasi/test-wasi-initialize-validation.js @@ -78,7 +78,8 @@ const bufferSource = fixtures.readSync('simple.wasm'); () => { wasi.initialize(instance); }, { code: 'ERR_INVALID_ARG_TYPE', - message: /"instance\.exports\._start" property must be undefined/ + message: 'The "instance.exports._start" property must be' + + ' undefined. Received function _start', } ); } diff --git a/test/wasi/test-wasi-start-validation.js b/test/wasi/test-wasi-start-validation.js index 5c6a1ede5d4fd7..2059ff081e88dd 100644 --- a/test/wasi/test-wasi-start-validation.js +++ b/test/wasi/test-wasi-start-validation.js @@ -78,7 +78,8 @@ const bufferSource = fixtures.readSync('simple.wasm'); () => { wasi.start(instance); }, { code: 'ERR_INVALID_ARG_TYPE', - message: /"instance\.exports\._initialize" property must be undefined/ + message: 'The "instance.exports._initialize" property must be' + + ' undefined. 
Received function _initialize', } ); } From 83f3b959f974a49ba2c2524a5c502cf4d2c6c7bd Mon Sep 17 00:00:00 2001 From: Voltrex Date: Mon, 14 Jun 2021 03:37:54 +0430 Subject: [PATCH 017/133] fs: allow empty string for temp directory prefix The `fs` lib module's `mkdtemp()` and `mkdtempSync()` methods were missing a validator, and weren't allowing the empty string as a valid prefix. PR-URL: https://github.com/nodejs/node/pull/39028 Reviewed-By: Darshan Sen Reviewed-By: Zijian Liu Reviewed-By: Antoine du Hamel Reviewed-By: Luigi Pinca Reviewed-By: Khaidi Chu --- doc/api/fs.md | 11 +++++++++++ lib/fs.js | 12 +++++------- lib/internal/fs/promises.js | 7 +++---- test/parallel/test-fs-mkdtemp-prefix-check.js | 2 +- 4 files changed, 20 insertions(+), 12 deletions(-) diff --git a/doc/api/fs.md b/doc/api/fs.md index 5f1ea82b3fc8a6..89f0c14ff9e5dc 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -802,6 +802,10 @@ rejection only when `recursive` is false. ### `fsPromises.mkdtemp(prefix[, options])` * `prefix` {string} @@ -2572,6 +2576,9 @@ See the POSIX mkdir(2) documentation for more details. * `prefix` {string} diff --git a/lib/fs.js b/lib/fs.js index 46209a3f4d58c0..cf3c885b31cdf8 100644 --- a/lib/fs.js +++ b/lib/fs.js @@ -74,7 +74,6 @@ const { codes: { ERR_FS_FILE_TOO_LARGE, ERR_INVALID_ARG_VALUE, - ERR_INVALID_ARG_TYPE, ERR_FEATURE_UNAVAILABLE_ON_PLATFORM, }, AbortError, @@ -136,6 +135,7 @@ const { validateEncoding, validateFunction, validateInteger, + validateString, } = require('internal/validators'); const watchers = require('internal/fs/watchers'); @@ -2712,9 +2712,8 @@ realpath.native = (path, options, callback) => { function mkdtemp(prefix, options, callback) { callback = makeCallback(typeof options === 'function' ? 
options : callback); options = getOptions(options, {}); - if (!prefix || typeof prefix !== 'string') { - throw new ERR_INVALID_ARG_TYPE('prefix', 'string', prefix); - } + + validateString(prefix, 'prefix'); nullCheck(prefix, 'prefix'); warnOnNonPortableTemplate(prefix); const req = new FSReqCallback(); @@ -2730,9 +2729,8 @@ function mkdtemp(prefix, options, callback) { */ function mkdtempSync(prefix, options) { options = getOptions(options, {}); - if (!prefix || typeof prefix !== 'string') { - throw new ERR_INVALID_ARG_TYPE('prefix', 'string', prefix); - } + + validateString(prefix, 'prefix'); nullCheck(prefix, 'prefix'); warnOnNonPortableTemplate(prefix); const path = `${prefix}XXXXXX`; diff --git a/lib/internal/fs/promises.js b/lib/internal/fs/promises.js index 62b4ae10b21fc4..d356d8726fc482 100644 --- a/lib/internal/fs/promises.js +++ b/lib/internal/fs/promises.js @@ -28,7 +28,6 @@ const { Buffer } = require('buffer'); const { codes: { ERR_FS_FILE_TOO_LARGE, - ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, ERR_METHOD_NOT_IMPLEMENTED, }, @@ -72,6 +71,7 @@ const { validateBuffer, validateEncoding, validateInteger, + validateString, } = require('internal/validators'); const pathModule = require('path'); const { promisify } = require('internal/util'); @@ -693,9 +693,8 @@ async function realpath(path, options) { async function mkdtemp(prefix, options) { options = getOptions(options, {}); - if (!prefix || typeof prefix !== 'string') { - throw new ERR_INVALID_ARG_TYPE('prefix', 'string', prefix); - } + + validateString(prefix, 'prefix'); nullCheck(prefix); warnOnNonPortableTemplate(prefix); return binding.mkdtemp(`${prefix}XXXXXX`, options.encoding, kUsePromises); diff --git a/test/parallel/test-fs-mkdtemp-prefix-check.js b/test/parallel/test-fs-mkdtemp-prefix-check.js index 1d9d88232a067e..33a06914a46e10 100644 --- a/test/parallel/test-fs-mkdtemp-prefix-check.js +++ b/test/parallel/test-fs-mkdtemp-prefix-check.js @@ -3,7 +3,7 @@ const common = require('../common'); 
const assert = require('assert'); const fs = require('fs'); -const prefixValues = [undefined, null, 0, true, false, 1, '']; +const prefixValues = [undefined, null, 0, true, false, 1]; function fail(value) { assert.throws( From 471510558103de5ea8c67700dabfe6128597895c Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Mon, 14 Jun 2021 17:48:40 +0200 Subject: [PATCH 018/133] tools: upgrade `highlight.js` to version 11.0.1 Refs: https://github.com/highlightjs/highlight.js/blob/main/VERSION_11_UPGRADE.md PR-URL: https://github.com/nodejs/node/pull/39032 Reviewed-By: Luigi Pinca --- doc/api_assets/hljs.css | 8 +++++--- tools/doc/package-lock.json | 18 +++++++++--------- tools/doc/package.json | 2 +- 3 files changed, 15 insertions(+), 13 deletions(-) diff --git a/doc/api_assets/hljs.css b/doc/api_assets/hljs.css index bbf076987530d6..4893f9de26fab4 100644 --- a/doc/api_assets/hljs.css +++ b/doc/api_assets/hljs.css @@ -8,7 +8,8 @@ } .hljs-attribute, -.hljs-keyword { +.hljs-keyword, +.hljs-type { color: #338; } @@ -35,9 +36,10 @@ color: var(--green4); } -.dark-mode .hljs-keyword, .dark-mode .hljs-attribute, -.dark-mode .hljs-doctag { +.dark-mode .hljs-doctag, +.dark-mode .hljs-keyword, +.dark-mode .hljs-type { color: #66d9ef; } diff --git a/tools/doc/package-lock.json b/tools/doc/package-lock.json index 9d9ce759c0896e..598a8fccd30e78 100644 --- a/tools/doc/package-lock.json +++ b/tools/doc/package-lock.json @@ -11,7 +11,7 @@ "node-doc-generator": "generate.js" }, "devDependencies": { - "highlight.js": "10.7.3", + "highlight.js": "11.0.1", "js-yaml": "4.1.0", "rehype-raw": "5.1.0", "rehype-stringify": "8.0.0", @@ -25,7 +25,7 @@ "unist-util-visit": "3.1.0" }, "engines": { - "node": ">=12.10.0" + "node": ">=14.8.0" } }, "node_modules/@types/hast": { @@ -356,12 +356,12 @@ } }, "node_modules/highlight.js": { - "version": "10.7.3", - "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-10.7.3.tgz", - "integrity": 
"sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==", + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.0.1.tgz", + "integrity": "sha512-EqYpWyTF2s8nMfttfBA2yLKPNoZCO33pLS4MnbXQ4hECf1TKujCt1Kq7QAdrio7roL4+CqsfjqwYj4tYgq0pJQ==", "dev": true, "engines": { - "node": "*" + "node": ">=12.0.0" } }, "node_modules/html-void-elements": { @@ -1513,9 +1513,9 @@ } }, "highlight.js": { - "version": "10.7.3", - "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-10.7.3.tgz", - "integrity": "sha512-tzcUFauisWKNHaRkN4Wjl/ZA07gENAjFl3J/c480dprkGTg5EQstgaNFqBfUqCq54kZRIEcreTsAgF/m2quD7A==", + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.0.1.tgz", + "integrity": "sha512-EqYpWyTF2s8nMfttfBA2yLKPNoZCO33pLS4MnbXQ4hECf1TKujCt1Kq7QAdrio7roL4+CqsfjqwYj4tYgq0pJQ==", "dev": true }, "html-void-elements": { diff --git a/tools/doc/package.json b/tools/doc/package.json index 223220bcc1c7e5..4a5624952f85ed 100644 --- a/tools/doc/package.json +++ b/tools/doc/package.json @@ -7,7 +7,7 @@ "node": ">=14.8.0" }, "devDependencies": { - "highlight.js": "10.7.3", + "highlight.js": "11.0.1", "js-yaml": "4.1.0", "rehype-raw": "5.1.0", "rehype-stringify": "8.0.0", From a669a191a1ae7a7581025c7424ee0154e04a27ae Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Sun, 27 Jun 2021 14:25:47 +0200 Subject: [PATCH 019/133] doc: use ASCII order for md refs PR-URL: https://github.com/nodejs/node/pull/39170 Refs: https://github.com/nodejs/remark-preset-lint-node/pull/188 Reviewed-By: Rich Trott Reviewed-By: Darshan Sen --- doc/api/assert.md | 4 ++-- doc/api/async_hooks.md | 2 +- doc/api/cluster.md | 2 +- doc/api/crypto.md | 2 +- doc/api/diagnostics_channel.md | 4 ++-- doc/api/esm.md | 4 ++-- doc/api/events.md | 4 ++-- doc/api/fs.md | 2 +- doc/api/http.md | 14 +++++++------- doc/api/http2.md | 4 ++-- doc/api/modules.md | 4 ++-- doc/api/packages.md | 10 +++++----- 
doc/api/stream.md | 4 ++-- doc/api/tls.md | 8 ++++---- doc/api/url.md | 2 +- doc/api/worker_threads.md | 6 +++--- src/README.md | 2 +- 17 files changed, 39 insertions(+), 39 deletions(-) diff --git a/doc/api/assert.md b/doc/api/assert.md index 037d233be0acd9..439841f6f0013e 100644 --- a/doc/api/assert.md +++ b/doc/api/assert.md @@ -2421,6 +2421,7 @@ argument. [SameValue Comparison]: https://tc39.github.io/ecma262/#sec-samevalue [Strict Equality Comparison]: https://tc39.github.io/ecma262/#sec-strict-equality-comparison [`AssertionError`]: #assert_class_assert_assertionerror +[`CallTracker`]: #assert_class_assert_calltracker [`Class`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes [`ERR_INVALID_RETURN_VALUE`]: errors.md#errors_err_invalid_return_value [`Error.captureStackTrace`]: errors.md#errors_error_capturestacktrace_targetobject_constructoropt @@ -2433,7 +2434,6 @@ argument. [`TypeError`]: errors.md#errors_class_typeerror [`WeakMap`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakMap [`WeakSet`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WeakSet -[`CallTracker`]: #assert_class_assert_calltracker [`assert.deepEqual()`]: #assert_assert_deepequal_actual_expected_message [`assert.deepStrictEqual()`]: #assert_assert_deepstrictequal_actual_expected_message [`assert.doesNotThrow()`]: #assert_assert_doesnotthrow_fn_error_message @@ -2448,6 +2448,6 @@ argument. 
[`process.on('exit')`]: process.md#process_event_exit [`tracker.calls()`]: #assert_tracker_calls_fn_exact [`tracker.verify()`]: #assert_tracker_verify -[strict assertion mode]: #assert_strict_assertion_mode [enumerable "own" properties]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Enumerability_and_ownership_of_properties [prototype-spec]: https://tc39.github.io/ecma262/#sec-ordinary-object-internal-methods-and-internal-slots +[strict assertion mode]: #assert_strict_assertion_mode diff --git a/doc/api/async_hooks.md b/doc/api/async_hooks.md index e09e89fc377cbb..851b3fe3a907fc 100644 --- a/doc/api/async_hooks.md +++ b/doc/api/async_hooks.md @@ -845,10 +845,10 @@ The documentation for this class has moved [`AsyncLocalStorage`][]. [PromiseHooks]: https://docs.google.com/document/d/1rda3yKGHimKIhg5YeoAmCOtyURgsbTH_qaYR79FELlk/edit [`AsyncLocalStorage`]: async_context.md#async_context_class_asynclocalstorage [`AsyncResource`]: async_context.md#async_context_class_asyncresource +[`Worker`]: worker_threads.md#worker_threads_class_worker [`after` callback]: #async_hooks_after_asyncid [`before` callback]: #async_hooks_before_asyncid [`destroy` callback]: #async_hooks_destroy_asyncid [`init` callback]: #async_hooks_init_asyncid_type_triggerasyncid_resource [`promiseResolve` callback]: #async_hooks_promiseresolve_asyncid -[`Worker`]: worker_threads.md#worker_threads_class_worker [promise execution tracking]: #async_hooks_promise_execution_tracking diff --git a/doc/api/cluster.md b/doc/api/cluster.md index 2b515ab82e3b6b..8675a510013540 100644 --- a/doc/api/cluster.md +++ b/doc/api/cluster.md @@ -1082,9 +1082,9 @@ socket.on('data', (id) => { [`child_process.fork()`]: child_process.md#child_process_child_process_fork_modulepath_args_options [`child_process` event: `'exit'`]: child_process.md#child_process_event_exit [`child_process` event: `'message'`]: child_process.md#child_process_event_message +[`cluster.isPrimary`]: #cluster_cluster_isprimary 
[`cluster.settings`]: #cluster_cluster_settings [`disconnect()`]: child_process.md#child_process_subprocess_disconnect -[`cluster.isPrimary`]: #cluster_cluster_isprimary [`kill()`]: process.md#process_process_kill_pid_signal [`process` event: `'message'`]: process.md#process_event_message [`server.close()`]: net.md#net_event_close diff --git a/doc/api/crypto.md b/doc/api/crypto.md index 2daca54e7b7c47..8defe0487d31bf 100644 --- a/doc/api/crypto.md +++ b/doc/api/crypto.md @@ -5640,7 +5640,6 @@ See the [list of SSL OP Flags][] for details. [AEAD algorithms]: https://en.wikipedia.org/wiki/Authenticated_encryption [CCM mode]: #crypto_ccm_mode [Caveats]: #crypto_support_for_weak_or_compromised_algorithms -[caveats when using strings as inputs to cryptographic APIs]: #crypto_using_strings_as_inputs_to_cryptographic_apis [Crypto constants]: #crypto_crypto_constants_1 [HTML 5.2]: https://www.w3.org/TR/html52/changes.html#features-removed [HTML5's `keygen` element]: https://developer.mozilla.org/en-US/docs/Web/HTML/Element/keygen @@ -5710,6 +5709,7 @@ See the [list of SSL OP Flags][] for details. 
[`util.promisify()`]: util.md#util_util_promisify_original [`verify.update()`]: #crypto_verify_update_data_inputencoding [`verify.verify()`]: #crypto_verify_verify_object_signature_signatureencoding +[caveats when using strings as inputs to cryptographic APIs]: #crypto_using_strings_as_inputs_to_cryptographic_apis [certificate object]: tls.md#tls_certificate_object [encoding]: buffer.md#buffer_buffers_and_character_encodings [initialization vector]: https://en.wikipedia.org/wiki/Initialization_vector diff --git a/doc/api/diagnostics_channel.md b/doc/api/diagnostics_channel.md index ba02a5092c3731..8a591da61f43ca 100644 --- a/doc/api/diagnostics_channel.md +++ b/doc/api/diagnostics_channel.md @@ -257,6 +257,6 @@ channel.subscribe(onMessage); channel.unsubscribe(onMessage); ``` -[`diagnostics_channel.channel(name)`]: #diagnostics_channel_diagnostics_channel_channel_name -[`channel.subscribe(onMessage)`]: #diagnostics_channel_channel_subscribe_onmessage [`'uncaughtException'`]: process.md#process_event_uncaughtexception +[`channel.subscribe(onMessage)`]: #diagnostics_channel_channel_subscribe_onmessage +[`diagnostics_channel.channel(name)`]: #diagnostics_channel_diagnostics_channel_channel_name diff --git a/doc/api/esm.md b/doc/api/esm.md index 72bbf1c51cebe9..f3827b5663966c 100644 --- a/doc/api/esm.md +++ b/doc/api/esm.md @@ -1338,15 +1338,15 @@ success! 
[`data:` URLs]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs [`export`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/export [`import()`]: #esm_import_expressions -[`import.meta.url`]: #esm_import_meta_url [`import.meta.resolve`]: #esm_import_meta_resolve_specifier_parent +[`import.meta.url`]: #esm_import_meta_url [`import`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/import [`module.createRequire()`]: module.md#module_module_createrequire_filename [`module.syncBuiltinESMExports()`]: module.md#module_module_syncbuiltinesmexports [`package.json`]: packages.md#packages_node_js_package_json_field_definitions [`process.dlopen`]: process.md#process_process_dlopen_module_filename_flags -[`transformSource` hook]: #esm_transformsource_source_context_defaulttransformsource [`string`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String +[`transformSource` hook]: #esm_transformsource_source_context_defaulttransformsource [`util.TextDecoder`]: util.md#util_class_util_textdecoder [cjs-module-lexer]: https://github.com/guybedford/cjs-module-lexer/tree/1.2.1 [custom https loader]: #esm_https_loader diff --git a/doc/api/events.md b/doc/api/events.md index 2b0d4ef5ceb6da..c9c2e43353d7ba 100644 --- a/doc/api/events.md +++ b/doc/api/events.md @@ -1636,8 +1636,8 @@ to the `EventTarget`. 
[`fs.ReadStream`]: fs.md#fs_class_fs_readstream [`net.Server`]: net.md#net_class_net_server [`process.on('warning')`]: process.md#process_event_warning -[stream]: stream.md [capturerejections]: #events_capture_rejections_of_promises +[error]: #events_error_events [rejection]: #events_emitter_symbol_for_nodejs_rejection_err_eventname_args [rejectionsymbol]: #events_events_capturerejectionsymbol -[error]: #events_error_events +[stream]: stream.md diff --git a/doc/api/fs.md b/doc/api/fs.md index 89f0c14ff9e5dc..f5c2552bbb8cf3 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -6706,7 +6706,6 @@ the file contents. [Naming Files, Paths, and Namespaces]: https://docs.microsoft.com/en-us/windows/desktop/FileIO/naming-a-file [Readable Stream]: stream.md#stream_class_stream_readable [Writable Stream]: stream.md#stream_class_stream_writable -[caveats]: #fs_caveats [`AHAFS`]: https://developer.ibm.com/articles/au-aix_event_infrastructure/ [`Buffer.byteLength`]: buffer.md#buffer_static_method_buffer_bytelength_string_encoding [`FSEvents`]: https://developer.apple.com/documentation/coreservices/file_system_events @@ -6759,6 +6758,7 @@ the file contents. 
[`kqueue(2)`]: https://www.freebsd.org/cgi/man.cgi?query=kqueue&sektion=2 [`util.promisify()`]: util.md#util_util_promisify_original [bigints]: https://tc39.github.io/proposal-bigint +[caveats]: #fs_caveats [chcp]: https://ss64.com/nt/chcp.html [inode]: https://en.wikipedia.org/wiki/Inode [support of file system `flags`]: #fs_file_system_flags diff --git a/doc/api/http.md b/doc/api/http.md index 9c4807c53afcec..e3d2b08deae3c8 100644 --- a/doc/api/http.md +++ b/doc/api/http.md @@ -3140,13 +3140,13 @@ try { } ``` -[`--insecure-http-parser`]: cli.md#cli_insecure_http_parser -[`--max-http-header-size`]: cli.md#cli_max_http_header_size_size [`'checkContinue'`]: #http_event_checkcontinue [`'finish'`]: #http_event_finish [`'request'`]: #http_event_request [`'response'`]: #http_event_response [`'upgrade'`]: #http_event_upgrade +[`--insecure-http-parser`]: cli.md#cli_insecure_http_parser +[`--max-http-header-size`]: cli.md#cli_max_http_header_size_size [`Agent`]: #http_class_http_agent [`Buffer.byteLength()`]: buffer.md#buffer_static_method_buffer_bytelength_string_encoding [`Duplex`]: stream.md#stream_class_stream_duplex @@ -3162,37 +3162,37 @@ try { [`http.Agent`]: #http_class_http_agent [`http.ClientRequest`]: #http_class_http_clientrequest [`http.IncomingMessage`]: #http_class_http_incomingmessage -[`http.Server`]: #http_class_http_server [`http.ServerResponse`]: #http_class_http_serverresponse +[`http.Server`]: #http_class_http_server [`http.get()`]: #http_http_get_options_callback [`http.globalAgent`]: #http_http_globalagent [`http.request()`]: #http_http_request_options_callback [`message.headers`]: #http_message_headers +[`message.socket`]: #http_message_socket [`net.Server.close()`]: net.md#net_server_close_callback [`net.Server`]: net.md#net_class_net_server [`net.Socket`]: net.md#net_class_net_socket [`net.createConnection()`]: net.md#net_net_createconnection_options_connectlistener [`new URL()`]: url.md#url_new_url_input_base -[`message.socket`]: 
#http_message_socket [`outgoingMessage.socket`]: #http_outgoingMessage.socket [`removeHeader(name)`]: #http_request_removeheader_name -[`request.end()`]: #http_request_end_data_encoding_callback [`request.destroy()`]: #http_request_destroy_error +[`request.end()`]: #http_request_end_data_encoding_callback [`request.flushHeaders()`]: #http_request_flushheaders [`request.getHeader()`]: #http_request_getheader_name [`request.setHeader()`]: #http_request_setheader_name_value [`request.setTimeout()`]: #http_request_settimeout_timeout_callback [`request.socket.getPeerCertificate()`]: tls.md#tls_tlssocket_getpeercertificate_detailed [`request.socket`]: #http_request_socket -[`request.writableFinished`]: #http_request_writablefinished [`request.writableEnded`]: #http_request_writableended +[`request.writableFinished`]: #http_request_writablefinished [`request.write(data, encoding)`]: #http_request_write_chunk_encoding_callback [`response.end()`]: #http_response_end_data_encoding_callback [`response.getHeader()`]: #http_response_getheader_name [`response.setHeader()`]: #http_response_setheader_name_value [`response.socket`]: #http_response_socket -[`response.writableFinished`]: #http_response_writablefinished [`response.writableEnded`]: #http_response_writableended +[`response.writableFinished`]: #http_response_writablefinished [`response.write()`]: #http_response_write_chunk_encoding_callback [`response.write(data, encoding)`]: #http_response_write_chunk_encoding_callback [`response.writeContinue()`]: #http_response_writecontinue diff --git a/doc/api/http2.md b/doc/api/http2.md index 13d187321c6780..58081253229a83 100644 --- a/doc/api/http2.md +++ b/doc/api/http2.md @@ -3852,6 +3852,7 @@ you need to implement any fall-back behavior yourself. 
[RFC 7838]: https://tools.ietf.org/html/rfc7838 [RFC 8336]: https://tools.ietf.org/html/rfc8336 [RFC 8441]: https://tools.ietf.org/html/rfc8441 +[Sensitive headers]: #http2_sensitive_headers [`'checkContinue'`]: #http2_event_checkcontinue [`'connect'`]: #http2_event_connect [`'request'`]: #http2_event_request @@ -3881,8 +3882,8 @@ you need to implement any fall-back behavior yourself. [`net.connect()`]: net.md#net_net_connect [`net.createServer()`]: net.md#net_net_createserver_options_connectionlistener [`request.authority`]: #http2_request_authority -[`request.socket`]: #http2_request_socket [`request.socket.getPeerCertificate()`]: tls.md#tls_tlssocket_getpeercertificate_detailed +[`request.socket`]: #http2_request_socket [`response.end()`]: #http2_response_end_data_encoding_callback [`response.setHeader()`]: #http2_response_setheader_name_value [`response.socket`]: #http2_response_socket @@ -3897,4 +3898,3 @@ you need to implement any fall-back behavior yourself. [`tls.createServer()`]: tls.md#tls_tls_createserver_options_secureconnectionlistener [`writable.writableFinished`]: stream.md#stream_writable_writablefinished [error code]: #http2_error_codes_for_rst_stream_and_goaway -[Sensitive headers]: #http2_sensitive_headers diff --git a/doc/api/modules.md b/doc/api/modules.md index a28be94e536e49..7ddfeab3a7b229 100644 --- a/doc/api/modules.md +++ b/doc/api/modules.md @@ -1009,9 +1009,9 @@ This section was moved to [`Error`]: errors.md#errors_class_error [`__dirname`]: #modules_dirname [`__filename`]: #modules_filename -[`module` object]: #modules_the_module_object -[`module.id`]: #modules_module_id [`module.children`]: #modules_module_children +[`module.id`]: #modules_module_id +[`module` object]: #modules_the_module_object [`package.json`]: packages.md#packages_node_js_package_json_field_definitions [`path.dirname()`]: path.md#path_path_dirname_path [`require.main`]: #modules_require_main diff --git a/doc/api/packages.md b/doc/api/packages.md index 
cd697e9785fded..770fd338abcb82 100644 --- a/doc/api/packages.md +++ b/doc/api/packages.md @@ -1171,23 +1171,23 @@ Import maps permit mapping to external packages. This field defines [subpath imports][] for the current package. [Babel]: https://babeljs.io/ -[Conditional exports]: #packages_conditional_exports [CommonJS]: modules.md +[Conditional exports]: #packages_conditional_exports [ES module]: esm.md [ES modules]: esm.md [Node.js documentation for this section]: https://github.com/nodejs/node/blob/HEAD/doc/api/packages.md#conditions-definitions -[`ERR_PACKAGE_PATH_NOT_EXPORTED`]: errors.md#errors_err_package_path_not_exported -[`esm`]: https://github.com/standard-things/esm#readme [`"exports"`]: #packages_exports +[`"imports"`]: #packages_imports [`"main"`]: #packages_main [`"name"`]: #packages_name -[`"imports"`]: #packages_imports [`"type"`]: #packages_type +[`ERR_PACKAGE_PATH_NOT_EXPORTED`]: errors.md#errors_err_package_path_not_exported +[`esm`]: https://github.com/standard-things/esm#readme [`package.json`]: #packages_node_js_package_json_field_definitions [entry points]: #packages_package_entry_points [self-reference]: #packages_self_referencing_a_package_using_its_name [subpath exports]: #packages_subpath_exports [subpath imports]: #packages_subpath_imports [subpath patterns]: #packages_subpath_patterns -[the full specifier path]: esm.md#esm_mandatory_file_extensions [the dual CommonJS/ES module packages section]: #packages_dual_commonjs_es_module_packages +[the full specifier path]: esm.md#esm_mandatory_file_extensions diff --git a/doc/api/stream.md b/doc/api/stream.md index 57c72c45a176f6..289890840616ca 100644 --- a/doc/api/stream.md +++ b/doc/api/stream.md @@ -3293,6 +3293,7 @@ contain multi-byte characters. 
[HTTP requests, on the client]: http.md#http_class_http_clientrequest [HTTP responses, on the server]: http.md#http_class_http_serverresponse [TCP sockets]: net.md#net_class_net_socket +[Three states]: #stream_three_states [`'data'`]: #stream_event_data [`'drain'`]: #stream_event_drain [`'end'`]: #stream_event_end @@ -3314,11 +3315,11 @@ contain multi-byte characters. [`readable.push('')`]: #stream_readable_push [`readable.setEncoding()`]: #stream_readable_setencoding_encoding [`stream.Readable.from()`]: #stream_stream_readable_from_iterable_options +[`stream.addAbortSignal()`]: #stream_stream_addabortsignal_signal_stream [`stream.cork()`]: #stream_writable_cork [`stream.finished()`]: #stream_stream_finished_stream_options_callback [`stream.pipe()`]: #stream_readable_pipe_destination_options [`stream.pipeline()`]: #stream_stream_pipeline_source_transforms_destination_callback -[`stream.addAbortSignal()`]: #stream_stream_addabortsignal_signal_stream [`stream.uncork()`]: #stream_writable_uncork [`stream.unpipe()`]: #stream_readable_unpipe_destination [`stream.wrap()`]: #stream_readable_wrap_stream @@ -3354,7 +3355,6 @@ contain multi-byte characters. 
[stream-resume]: #stream_readable_resume [stream-uncork]: #stream_writable_uncork [stream-write]: #stream_writable_write_chunk_encoding_callback -[Three states]: #stream_three_states [writable-_construct]: #stream_writable_construct_callback [writable-_destroy]: #stream_writable_destroy_err_callback [writable-destroy]: #stream_writable_destroy_error diff --git a/doc/api/tls.md b/doc/api/tls.md index b82de483f9b825..2b4c6bf111f3da 100644 --- a/doc/api/tls.md +++ b/doc/api/tls.md @@ -2073,14 +2073,14 @@ added: v11.4.0 [Session Resumption]: #tls_session_resumption [Stream]: stream.md#stream_stream [TLS recommendations]: https://wiki.mozilla.org/Security/Server_Side_TLS -[`--tls-cipher-list`]: cli.md#cli_tls_cipher_list_list -[`Duplex`]: stream.md#stream_class_stream_duplex -[`NODE_OPTIONS`]: cli.md#cli_node_options_options [`'newSession'`]: #tls_event_newsession [`'resumeSession'`]: #tls_event_resumesession [`'secureConnect'`]: #tls_event_secureconnect [`'secureConnection'`]: #tls_event_secureconnection [`'session'`]: #tls_event_session +[`--tls-cipher-list`]: cli.md#cli_tls_cipher_list_list +[`Duplex`]: stream.md#stream_class_stream_duplex +[`NODE_OPTIONS`]: cli.md#cli_node_options_options [`SSL_export_keying_material`]: https://www.openssl.org/docs/man1.1.1/man3/SSL_export_keying_material.html [`SSL_get_version`]: https://www.openssl.org/docs/man1.1.1/man3/SSL_get_version.html [`crypto.getCurves()`]: crypto.md#crypto_crypto_getcurves @@ -2113,5 +2113,5 @@ added: v11.4.0 [cipher list format]: https://www.openssl.org/docs/man1.1.1/man1/ciphers.html#CIPHER-LIST-FORMAT [forward secrecy]: https://en.wikipedia.org/wiki/Perfect_forward_secrecy [modifying the default cipher suite]: #tls_modifying_the_default_tls_cipher_suite -[specific attacks affecting larger AES key sizes]: https://www.schneier.com/blog/archives/2009/07/another_new_aes.html [perfect forward secrecy]: #tls_perfect_forward_secrecy +[specific attacks affecting larger AES key sizes]: 
https://www.schneier.com/blog/archives/2009/07/another_new_aes.html diff --git a/doc/api/url.md b/doc/api/url.md index 452d9cfac9250e..23443b030a90f7 100644 --- a/doc/api/url.md +++ b/doc/api/url.md @@ -1593,8 +1593,8 @@ console.log(myURL.origin); [ICU]: intl.md#intl_options_for_building_node_js [Punycode]: https://tools.ietf.org/html/rfc5891#section-4.4 -[WHATWG URL Standard]: https://url.spec.whatwg.org/ [WHATWG URL]: #url_the_whatwg_url_api +[WHATWG URL Standard]: https://url.spec.whatwg.org/ [`Error`]: errors.md#errors_class_error [`JSON.stringify()`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify [`Map`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map diff --git a/doc/api/worker_threads.md b/doc/api/worker_threads.md index f345b9dba76913..1ca8852308902b 100644 --- a/doc/api/worker_threads.md +++ b/doc/api/worker_threads.md @@ -1259,6 +1259,7 @@ thread spawned will spawn another until the application crashes. [`SharedArrayBuffer`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/SharedArrayBuffer [`Uint8Array`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Uint8Array [`WebAssembly.Module`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/WebAssembly/Module +[`Worker constructor options`]: #worker_threads_new_worker_filename_options [`Worker`]: #worker_threads_class_worker [`cluster` module]: cluster.md [`data:` URL]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URIs @@ -1281,17 +1282,16 @@ thread spawned will spawn another until the application crashes. 
[`process.title`]: process.md#process_process_title [`require('worker_threads').isMainThread`]: #worker_threads_worker_ismainthread [`require('worker_threads').parentPort.on('message')`]: #worker_threads_event_message -[`require('worker_threads').parentPort`]: #worker_threads_worker_parentport [`require('worker_threads').parentPort.postMessage()`]: #worker_threads_worker_postmessage_value_transferlist +[`require('worker_threads').parentPort`]: #worker_threads_worker_parentport [`require('worker_threads').threadId`]: #worker_threads_worker_threadid [`require('worker_threads').workerData`]: #worker_threads_worker_workerdata [`trace_events`]: tracing.md [`v8.getHeapSnapshot()`]: v8.md#v8_v8_getheapsnapshot [`vm`]: vm.md -[`Worker constructor options`]: #worker_threads_new_worker_filename_options +[`worker.SHARE_ENV`]: #worker_threads_worker_share_env [`worker.on('message')`]: #worker_threads_event_message_1 [`worker.postMessage()`]: #worker_threads_worker_postmessage_value_transferlist -[`worker.SHARE_ENV`]: #worker_threads_worker_share_env [`worker.terminate()`]: #worker_threads_worker_terminate [`worker.threadId`]: #worker_threads_worker_threadid_1 [async-resource-worker-pool]: async_hooks.md#async-resource-worker-pool diff --git a/src/README.md b/src/README.md index b278c0287366cf..91b9f3817a36b0 100644 --- a/src/README.md +++ b/src/README.md @@ -1036,7 +1036,7 @@ static void GetUserInfo(const FunctionCallbackInfo& args) { [exception handling]: #exception-handling [internal field]: #internal-fields [introduction for V8 embedders]: https://v8.dev/docs/embed +[libuv]: https://libuv.org/ [libuv handles]: #libuv-handles-and-requests [libuv requests]: #libuv-handles-and-requests -[libuv]: https://libuv.org/ [reference documentation for the libuv API]: http://docs.libuv.org/en/v1.x/ From 0e55cb72dfe442b65473c2b442dcb7ab7c512f69 Mon Sep 17 00:00:00 2001 From: XadillaX Date: Mon, 21 Jun 2021 11:34:48 +0800 Subject: [PATCH 020/133] lib: make lazyDOMException more common 
PR-URL: https://github.com/nodejs/node/pull/39105 Reviewed-By: Antoine du Hamel --- lib/buffer.js | 13 +++---------- lib/internal/crypto/aes.js | 5 ++++- lib/internal/crypto/diffiehellman.js | 5 ++++- lib/internal/crypto/dsa.js | 5 ++++- lib/internal/crypto/ec.js | 5 ++++- lib/internal/crypto/hkdf.js | 5 ++++- lib/internal/crypto/mac.js | 5 ++++- lib/internal/crypto/pbkdf2.js | 5 ++++- lib/internal/crypto/random.js | 2 +- lib/internal/crypto/rsa.js | 5 ++++- lib/internal/crypto/scrypt.js | 5 ++++- lib/internal/crypto/util.js | 9 +-------- lib/internal/crypto/webcrypto.js | 5 ++++- lib/internal/fs/promises.js | 5 ++--- lib/internal/util.js | 9 +++++++++ 15 files changed, 56 insertions(+), 32 deletions(-) diff --git a/lib/buffer.js b/lib/buffer.js index 18b90f40527921..278a67cbbfb37e 100644 --- a/lib/buffer.js +++ b/lib/buffer.js @@ -75,6 +75,7 @@ const { const { customInspectSymbol, isInsideNodeModules, + lazyDOMException, normalizeEncoding, kIsEncodingSymbol } = require('internal/util'); @@ -1208,14 +1209,6 @@ if (internalBinding('config').hasIntl) { }; } -let DOMException; - -const lazyInvalidCharError = hideStackFrames((message, name) => { - if (DOMException === undefined) - DOMException = internalBinding('messaging').DOMException; - throw new DOMException('Invalid character', 'InvalidCharacterError'); -}); - function btoa(input) { // The implementation here has not been performance optimized in any way and // should not be. 
@@ -1223,7 +1216,7 @@ function btoa(input) { input = `${input}`; for (let n = 0; n < input.length; n++) { if (input[n].charCodeAt(0) > 0xff) - lazyInvalidCharError(); + throw lazyDOMException('Invalid character', 'InvalidCharacterError'); } const buf = Buffer.from(input, 'latin1'); return buf.toString('base64'); @@ -1239,7 +1232,7 @@ function atob(input) { input = `${input}`; for (let n = 0; n < input.length; n++) { if (!kBase64Digits.includes(input[n])) - lazyInvalidCharError(); + throw lazyDOMException('Invalid character', 'InvalidCharacterError'); } return Buffer.from(input, 'base64').toString('latin1'); } diff --git a/lib/internal/crypto/aes.js b/lib/internal/crypto/aes.js index dd6aa49ff454ab..0675c59ec368ca 100644 --- a/lib/internal/crypto/aes.js +++ b/lib/internal/crypto/aes.js @@ -36,7 +36,6 @@ const { getArrayBufferOrView, hasAnyNotIn, jobPromise, - lazyDOMException, validateByteLength, validateKeyOps, validateMaxBufferLength, @@ -45,6 +44,10 @@ const { kKeyObject, } = require('internal/crypto/util'); +const { + lazyDOMException, +} = require('internal/util'); + const { PromiseReject } = primordials; const { diff --git a/lib/internal/crypto/diffiehellman.js b/lib/internal/crypto/diffiehellman.js index 2e38e95ea1d774..2efdbdd5bac9d3 100644 --- a/lib/internal/crypto/diffiehellman.js +++ b/lib/internal/crypto/diffiehellman.js @@ -47,6 +47,10 @@ const { isAnyArrayBuffer, } = require('internal/util/types'); +const { + lazyDOMException, +} = require('internal/util'); + const { KeyObject, InternalCryptoKey, @@ -66,7 +70,6 @@ const { getUsagesUnion, hasAnyNotIn, jobPromise, - lazyDOMException, toBuf, kHandle, kKeyObject, diff --git a/lib/internal/crypto/dsa.js b/lib/internal/crypto/dsa.js index b615c3a9cb932f..54bd70d9e2eac5 100644 --- a/lib/internal/crypto/dsa.js +++ b/lib/internal/crypto/dsa.js @@ -44,13 +44,16 @@ const { getUsagesUnion, hasAnyNotIn, jobPromise, - lazyDOMException, normalizeHashName, validateKeyOps, kKeyObject, kHandle, } = 
require('internal/crypto/util'); +const { + lazyDOMException, +} = require('internal/util'); + function verifyAcceptableDsaKeyUse(name, type, usages) { let checkSet; switch (type) { diff --git a/lib/internal/crypto/ec.js b/lib/internal/crypto/ec.js index ee14eed7d083a7..8bc7d9a28a42ea 100644 --- a/lib/internal/crypto/ec.js +++ b/lib/internal/crypto/ec.js @@ -38,7 +38,6 @@ const { getUsagesUnion, hasAnyNotIn, jobPromise, - lazyDOMException, normalizeHashName, validateKeyOps, kHandle, @@ -46,6 +45,10 @@ const { kNamedCurveAliases, } = require('internal/crypto/util'); +const { + lazyDOMException, +} = require('internal/util'); + const { generateKeyPair, } = require('internal/crypto/keygen'); diff --git a/lib/internal/crypto/hkdf.js b/lib/internal/crypto/hkdf.js index 8cff0489a553e0..84d39ac61b2b2e 100644 --- a/lib/internal/crypto/hkdf.js +++ b/lib/internal/crypto/hkdf.js @@ -23,7 +23,6 @@ const { kMaxLength } = require('buffer'); const { getArrayBufferOrView, - lazyDOMException, normalizeHashName, toBuf, validateByteSource, @@ -35,6 +34,10 @@ const { isKeyObject, } = require('internal/crypto/keys'); +const { + lazyDOMException, +} = require('internal/util'); + const { isAnyArrayBuffer, isArrayBufferView, diff --git a/lib/internal/crypto/mac.js b/lib/internal/crypto/mac.js index 5ee1f0918db7e1..61fcc88a923dad 100644 --- a/lib/internal/crypto/mac.js +++ b/lib/internal/crypto/mac.js @@ -18,7 +18,6 @@ const { getHashLength, hasAnyNotIn, jobPromise, - lazyDOMException, normalizeHashName, validateBitLength, validateKeyOps, @@ -26,6 +25,10 @@ const { kKeyObject, } = require('internal/crypto/util'); +const { + lazyDOMException, +} = require('internal/util'); + const { codes: { ERR_MISSING_OPTION, diff --git a/lib/internal/crypto/pbkdf2.js b/lib/internal/crypto/pbkdf2.js index d600f8f036284b..753c4f2d9da597 100644 --- a/lib/internal/crypto/pbkdf2.js +++ b/lib/internal/crypto/pbkdf2.js @@ -25,11 +25,14 @@ const { ERR_MISSING_OPTION } = require('internal/errors').codes; const { 
getArrayBufferOrView, getDefaultEncoding, - lazyDOMException, normalizeHashName, kKeyObject, } = require('internal/crypto/util'); +const { + lazyDOMException, +} = require('internal/util'); + function pbkdf2(password, salt, iterations, keylen, digest, callback) { if (typeof digest === 'function') { callback = digest; diff --git a/lib/internal/crypto/random.js b/lib/internal/crypto/random.js index cf84507515a505..5ce158324851d4 100644 --- a/lib/internal/crypto/random.js +++ b/lib/internal/crypto/random.js @@ -27,7 +27,7 @@ const { const { lazyDOMException, -} = require('internal/crypto/util'); +} = require('internal/util'); const { Buffer, kMaxLength } = require('buffer'); diff --git a/lib/internal/crypto/rsa.js b/lib/internal/crypto/rsa.js index e7b793fa184817..a0d27f3715e211 100644 --- a/lib/internal/crypto/rsa.js +++ b/lib/internal/crypto/rsa.js @@ -40,7 +40,6 @@ const { getUsagesUnion, hasAnyNotIn, jobPromise, - lazyDOMException, normalizeHashName, validateKeyOps, validateMaxBufferLength, @@ -48,6 +47,10 @@ const { kKeyObject, } = require('internal/crypto/util'); +const { + lazyDOMException, +} = require('internal/util'); + const { isUint8Array, } = require('internal/util/types'); diff --git a/lib/internal/crypto/scrypt.js b/lib/internal/crypto/scrypt.js index 45a04905bfd447..63a5547e4cbf79 100644 --- a/lib/internal/crypto/scrypt.js +++ b/lib/internal/crypto/scrypt.js @@ -30,10 +30,13 @@ const { const { getArrayBufferOrView, getDefaultEncoding, - lazyDOMException, kKeyObject, } = require('internal/crypto/util'); +const { + lazyDOMException, +} = require('internal/util'); + const defaults = { N: 16384, r: 8, diff --git a/lib/internal/crypto/util.js b/lib/internal/crypto/util.js index 8343940b99e169..eafcc3d9669288 100644 --- a/lib/internal/crypto/util.js +++ b/lib/internal/crypto/util.js @@ -51,6 +51,7 @@ const { Buffer } = require('buffer'); const { cachedResult, filterDuplicateStrings, + lazyDOMException, } = require('internal/util'); const { @@ -70,13 +71,6 @@ 
function lazyRequire(name) { return ret; } -let DOMException; -const lazyDOMException = hideStackFrames((message, name) => { - if (DOMException === undefined) - DOMException = internalBinding('messaging').DOMException; - return new DOMException(message, name); -}); - var defaultEncoding = 'buffer'; function setDefaultEncoding(val) { @@ -428,7 +422,6 @@ module.exports = { normalizeAlgorithm, normalizeHashName, hasAnyNotIn, - lazyDOMException, validateBitLength, validateByteLength, validateByteSource, diff --git a/lib/internal/crypto/webcrypto.js b/lib/internal/crypto/webcrypto.js index 900ac7a6e8ad7b..0dcdc28a6c2f63 100644 --- a/lib/internal/crypto/webcrypto.js +++ b/lib/internal/crypto/webcrypto.js @@ -49,7 +49,6 @@ const { const { getArrayBufferOrView, hasAnyNotIn, - lazyDOMException, lazyRequire, normalizeAlgorithm, normalizeHashName, @@ -59,6 +58,10 @@ const { kKeyObject, } = require('internal/crypto/util'); +const { + lazyDOMException, +} = require('internal/util'); + const { getRandomValues, } = require('internal/crypto/random'); diff --git a/lib/internal/fs/promises.js b/lib/internal/fs/promises.js index d356d8726fc482..6007e7384cbca3 100644 --- a/lib/internal/fs/promises.js +++ b/lib/internal/fs/promises.js @@ -74,7 +74,7 @@ const { validateString, } = require('internal/validators'); const pathModule = require('path'); -const { promisify } = require('internal/util'); +const { lazyDOMException, promisify } = require('internal/util'); const { EventEmitterMixin } = require('internal/event_target'); const { watch } = require('internal/fs/watchers'); const { isIterable } = require('internal/streams/utils'); @@ -209,8 +209,7 @@ class FileHandle extends EventEmitterMixin(JSTransferable) { [kTransfer]() { if (this[kClosePromise] || this[kRefs] > 1) { - const DOMException = internalBinding('messaging').DOMException; - throw new DOMException('Cannot transfer FileHandle while in use', + throw lazyDOMException('Cannot transfer FileHandle while in use', 
'DataCloneError'); } diff --git a/lib/internal/util.js b/lib/internal/util.js index f0a04f61818392..101fbec67775b0 100644 --- a/lib/internal/util.js +++ b/lib/internal/util.js @@ -28,6 +28,7 @@ const { } = primordials; const { + hideStackFrames, codes: { ERR_INVALID_ARG_TYPE, ERR_NO_CRYPTO, @@ -441,6 +442,13 @@ function createDeferredPromise() { return { promise, resolve, reject }; } +let DOMException; +const lazyDOMException = hideStackFrames((message, name) => { + if (DOMException === undefined) + DOMException = internalBinding('messaging').DOMException; + return new DOMException(message, name); +}); + module.exports = { assertCrypto, cachedResult, @@ -457,6 +465,7 @@ module.exports = { isError, isInsideNodeModules, join, + lazyDOMException, normalizeEncoding, once, promisify, From 7ea98fbccd0d0157129a5cf109b352531db9395d Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Wed, 9 Jun 2021 00:03:19 +0800 Subject: [PATCH 021/133] perf_hooks: refactor perf_hooks for snapshot building - Move Performance and InternalPerformance to a new lib/internal/perf/performance.js - Move now() getMilestoneTimestamp() into lib/internal/perf/utils.js - Rename lib/internal/perf/perf.js to lib/internal/perf/performance_entry.js - Refresh time origin at startup (this means the time origins could differ between snapshot building time and snapshot creation time) PR-URL: https://github.com/nodejs/node/pull/38971 Refs: https://github.com/nodejs/node/issues/35711 Reviewed-By: James M Snell --- lib/internal/bootstrap/pre_execution.js | 7 + lib/internal/http.js | 2 +- lib/internal/main/worker_thread.js | 2 + lib/internal/perf/event_loop_utilization.js | 2 +- lib/internal/perf/nodetiming.js | 31 +---- lib/internal/perf/observe.js | 12 +- lib/internal/perf/performance.js | 129 ++++++++++++++++++ .../perf/{perf.js => performance_entry.js} | 12 -- lib/internal/perf/timerify.js | 6 +- lib/internal/perf/usertiming.js | 22 +-- lib/internal/perf/utils.js | 33 +++++ lib/internal/worker.js | 8 +- 
lib/perf_hooks.js | 109 +-------------- src/node_perf.cc | 22 +-- test/parallel/test-bootstrap-modules.js | 10 ++ 15 files changed, 224 insertions(+), 183 deletions(-) create mode 100644 lib/internal/perf/performance.js rename lib/internal/perf/{perf.js => performance_entry.js} (87%) create mode 100644 lib/internal/perf/utils.js diff --git a/lib/internal/bootstrap/pre_execution.js b/lib/internal/bootstrap/pre_execution.js index 83ccfe90c11065..3b69844dc4ea0c 100644 --- a/lib/internal/bootstrap/pre_execution.js +++ b/lib/internal/bootstrap/pre_execution.js @@ -27,6 +27,7 @@ function prepareMainThreadExecution(expandArgv1 = false) { // Patch the process object with legacy properties and normalizations patchProcessObject(expandArgv1); setupTraceCategoryState(); + setupPerfHooks(); setupInspectorHooks(); setupWarningHandler(); @@ -222,6 +223,11 @@ function setupTraceCategoryState() { toggleTraceCategoryState(isTraceCategoryEnabled('node.async_hooks')); } +function setupPerfHooks() { + require('internal/perf/performance').refreshTimeOrigin(); + require('internal/perf/utils').refreshTimeOrigin(); +} + function setupInspectorHooks() { // If Debugger.setAsyncCallStackDepth is sent during bootstrap, // we cannot immediately call into JS to enable the hooks, which could @@ -474,6 +480,7 @@ module.exports = { setupCoverageHooks, setupWarningHandler, setupDebugEnv, + setupPerfHooks, prepareMainThreadExecution, initializeDeprecations, initializeESMLoader, diff --git a/lib/internal/http.js b/lib/internal/http.js index badfaa5c4a88d8..56187a2b1cc315 100644 --- a/lib/internal/http.js +++ b/lib/internal/http.js @@ -9,7 +9,7 @@ const { const { setUnrefTimeout } = require('internal/timers'); -const { InternalPerformanceEntry } = require('internal/perf/perf'); +const { InternalPerformanceEntry } = require('internal/perf/performance_entry'); const { enqueue, diff --git a/lib/internal/main/worker_thread.js b/lib/internal/main/worker_thread.js index e22fd17fa2d214..d6434ff96e1185 100644 
--- a/lib/internal/main/worker_thread.js +++ b/lib/internal/main/worker_thread.js @@ -18,6 +18,7 @@ const { setupInspectorHooks, setupWarningHandler, setupDebugEnv, + setupPerfHooks, initializeDeprecations, initializeWASI, initializeCJSLoader, @@ -114,6 +115,7 @@ port.on('message', (message) => { } = message; setupTraceCategoryState(); + setupPerfHooks(); initializeReport(); if (manifestSrc) { require('internal/process/policy').setup(manifestSrc, manifestURL); diff --git a/lib/internal/perf/event_loop_utilization.js b/lib/internal/perf/event_loop_utilization.js index 398c4ad4e42f58..d73b2f5a831ab9 100644 --- a/lib/internal/perf/event_loop_utilization.js +++ b/lib/internal/perf/event_loop_utilization.js @@ -2,7 +2,7 @@ const nodeTiming = require('internal/perf/nodetiming'); -const { now } = require('internal/perf/perf'); +const { now } = require('internal/perf/utils'); function eventLoopUtilization(util1, util2) { const ls = nodeTiming.loopStart; diff --git a/lib/internal/perf/nodetiming.js b/lib/internal/perf/nodetiming.js index 5ff6dd38cd86d3..fcbd7efff49099 100644 --- a/lib/internal/perf/nodetiming.js +++ b/lib/internal/perf/nodetiming.js @@ -3,15 +3,14 @@ const { ObjectDefineProperties, ObjectSetPrototypeOf, - SafeArrayIterator, - SafeSet, } = primordials; +const { PerformanceEntry } = require('internal/perf/performance_entry'); + const { - PerformanceEntry, - kReadOnlyAttributes, now, -} = require('internal/perf/perf'); + getMilestoneTimestamp, +} = require('internal/perf/utils'); const { customInspectSymbol: kInspect, @@ -29,26 +28,8 @@ const { NODE_PERFORMANCE_MILESTONE_ENVIRONMENT }, loopIdleTime, - milestones, - timeOrigin, } = internalBinding('performance'); -function getMilestoneTimestamp(milestoneIdx) { - const ns = milestones[milestoneIdx]; - if (ns === -1) - return ns; - return ns / 1e6 - timeOrigin; -} - -const readOnlyAttributes = new SafeSet(new SafeArrayIterator([ - 'nodeStart', - 'v8Start', - 'environment', - 'loopStart', - 'loopExit', - 
'bootstrapComplete', -])); - class PerformanceNodeTiming { constructor() { ObjectDefineProperties(this, { @@ -159,10 +140,6 @@ class PerformanceNodeTiming { idleTime: this.idleTime, }; } - - static get [kReadOnlyAttributes]() { - return readOnlyAttributes; - } } ObjectSetPrototypeOf( diff --git a/lib/internal/perf/observe.js b/lib/internal/perf/observe.js index c96925c723f64e..8ec8512434510b 100644 --- a/lib/internal/perf/observe.js +++ b/lib/internal/perf/observe.js @@ -31,7 +31,7 @@ const { const { InternalPerformanceEntry, isPerformanceEntry, -} = require('internal/perf/perf'); +} = require('internal/perf/performance_entry'); const { codes: { @@ -174,11 +174,13 @@ class PerformanceObserverEntryList { } class PerformanceObserver { - [kBuffer] = []; - [kEntryTypes] = new SafeSet(); - [kType] = undefined; - constructor(callback) { + // TODO(joyeecheung): V8 snapshot does not support instance member + // initializers for now: + // https://bugs.chromium.org/p/v8/issues/detail?id=10704 + this[kBuffer] = []; + this[kEntryTypes] = new SafeSet(); + this[kType] = undefined; validateCallback(callback); this[kCallback] = callback; } diff --git a/lib/internal/perf/performance.js b/lib/internal/perf/performance.js new file mode 100644 index 00000000000000..ca4aed90e4e270 --- /dev/null +++ b/lib/internal/perf/performance.js @@ -0,0 +1,129 @@ +'use strict'; + +const { + ObjectDefineProperty, + ObjectDefineProperties, + ObjectSetPrototypeOf, + TypeError, +} = primordials; + +const { + EventTarget, +} = require('internal/event_target'); + +const { now } = require('internal/perf/utils'); + +const { + mark, + measure, + clearMarks, +} = require('internal/perf/usertiming'); + +const eventLoopUtilization = require('internal/perf/event_loop_utilization'); +const nodeTiming = require('internal/perf/nodetiming'); +const timerify = require('internal/perf/timerify'); +const { customInspectSymbol: kInspect } = require('internal/util'); +const { inspect } = require('util'); + +const { + 
getTimeOriginTimestamp +} = internalBinding('performance'); + +class Performance extends EventTarget { + constructor() { + // eslint-disable-next-line no-restricted-syntax + throw new TypeError('Illegal constructor'); + } + + [kInspect](depth, options) { + if (depth < 0) return this; + + const opts = { + ...options, + depth: options.depth == null ? null : options.depth - 1 + }; + + return `Performance ${inspect({ + nodeTiming: this.nodeTiming, + timeOrigin: this.timeOrigin, + }, opts)}`; + } + +} + +function toJSON() { + return { + nodeTiming: this.nodeTiming, + timeOrigin: this.timeOrigin, + eventLoopUtilization: this.eventLoopUtilization() + }; +} + +class InternalPerformance extends EventTarget {} +InternalPerformance.prototype.constructor = Performance.prototype.constructor; +ObjectSetPrototypeOf(InternalPerformance.prototype, Performance.prototype); + +ObjectDefineProperties(Performance.prototype, { + clearMarks: { + configurable: true, + enumerable: false, + value: clearMarks, + }, + eventLoopUtilization: { + configurable: true, + enumerable: false, + value: eventLoopUtilization, + }, + mark: { + configurable: true, + enumerable: false, + value: mark, + }, + measure: { + configurable: true, + enumerable: false, + value: measure, + }, + nodeTiming: { + configurable: true, + enumerable: false, + value: nodeTiming, + }, + now: { + configurable: true, + enumerable: false, + value: now, + }, + timerify: { + configurable: true, + enumerable: false, + value: timerify, + }, + // This would be updated during pre-execution in case + // the process is launched from a snapshot. + // TODO(joyeecheung): we may want to warn about access to + // this during snapshot building. 
+ timeOrigin: { + configurable: true, + enumerable: true, + value: getTimeOriginTimestamp(), + }, + toJSON: { + configurable: true, + enumerable: true, + value: toJSON, + } +}); + +function refreshTimeOrigin() { + ObjectDefineProperty(Performance.prototype, 'timeOrigin', { + configurable: true, + enumerable: true, + value: getTimeOriginTimestamp(), + }); +} + +module.exports = { + InternalPerformance, + refreshTimeOrigin +}; diff --git a/lib/internal/perf/perf.js b/lib/internal/perf/performance_entry.js similarity index 87% rename from lib/internal/perf/perf.js rename to lib/internal/perf/performance_entry.js index d049d3c68fff04..f9f1c9e8966e2d 100644 --- a/lib/internal/perf/perf.js +++ b/lib/internal/perf/performance_entry.js @@ -6,10 +6,6 @@ const { TypeError, } = primordials; -const { - timeOrigin, -} = internalBinding('performance'); - const { customInspectSymbol: kInspect, } = require('internal/util'); @@ -21,12 +17,6 @@ const kType = Symbol('kType'); const kStart = Symbol('kStart'); const kDuration = Symbol('kDuration'); const kDetail = Symbol('kDetail'); -const kReadOnlyAttributes = Symbol('kReadOnlyAttributes'); - -function now() { - const hr = process.hrtime(); - return (hr[0] * 1000 + hr[1] / 1e6) - timeOrigin; -} function isPerformanceEntry(obj) { return obj?.[kName] !== undefined; @@ -88,7 +78,5 @@ ObjectSetPrototypeOf( module.exports = { InternalPerformanceEntry, PerformanceEntry, - kReadOnlyAttributes, isPerformanceEntry, - now, }; diff --git a/lib/internal/perf/timerify.js b/lib/internal/perf/timerify.js index d730f62aae7eb1..dae0b06bf80c8a 100644 --- a/lib/internal/perf/timerify.js +++ b/lib/internal/perf/timerify.js @@ -9,10 +9,8 @@ const { Symbol, } = primordials; -const { - InternalPerformanceEntry, - now, -} = require('internal/perf/perf'); +const { InternalPerformanceEntry } = require('internal/perf/performance_entry'); +const { now } = require('internal/perf/utils'); const { validateFunction, diff --git a/lib/internal/perf/usertiming.js 
b/lib/internal/perf/usertiming.js index 6672a3f4dfdeb0..f83091de1919a8 100644 --- a/lib/internal/perf/usertiming.js +++ b/lib/internal/perf/usertiming.js @@ -3,16 +3,13 @@ const { ObjectKeys, SafeMap, + SafeSet, + SafeArrayIterator, } = primordials; -const { - InternalPerformanceEntry, - kReadOnlyAttributes, - now, -} = require('internal/perf/perf'); - +const { InternalPerformanceEntry } = require('internal/perf/performance_entry'); +const { now } = require('internal/perf/utils'); const { enqueue } = require('internal/perf/observe'); - const nodeTiming = require('internal/perf/nodetiming'); const { @@ -31,8 +28,15 @@ const { } = require('internal/errors'); const marks = new SafeMap(); -const nodeTimingReadOnlyAttributes = - nodeTiming.constructor[kReadOnlyAttributes]; + +const nodeTimingReadOnlyAttributes = new SafeSet(new SafeArrayIterator([ + 'nodeStart', + 'v8Start', + 'environment', + 'loopStart', + 'loopExit', + 'bootstrapComplete', +])); function getMark(name) { if (name === undefined) return; diff --git a/lib/internal/perf/utils.js b/lib/internal/perf/utils.js new file mode 100644 index 00000000000000..bcc7e223b8c882 --- /dev/null +++ b/lib/internal/perf/utils.js @@ -0,0 +1,33 @@ +'use strict'; + +const binding = internalBinding('performance'); +const { + milestones, + getTimeOrigin, +} = binding; + +// TODO(joyeecheung): we may want to warn about access to +// this during snapshot building. 
+let timeOrigin = getTimeOrigin(); + +function now() { + const hr = process.hrtime(); + return (hr[0] * 1000 + hr[1] / 1e6) - timeOrigin; +} + +function getMilestoneTimestamp(milestoneIdx) { + const ns = milestones[milestoneIdx]; + if (ns === -1) + return ns; + return ns / 1e6 - timeOrigin; +} + +function refreshTimeOrigin() { + timeOrigin = getTimeOrigin(); +} + +module.exports = { + now, + getMilestoneTimestamp, + refreshTimeOrigin +}; diff --git a/lib/internal/worker.js b/lib/internal/worker.js index f2414ebeec4aae..931bce0c518fc3 100644 --- a/lib/internal/worker.js +++ b/lib/internal/worker.js @@ -28,7 +28,7 @@ const { const EventEmitter = require('events'); const assert = require('internal/assert'); const path = require('path'); -const { timeOrigin } = internalBinding('performance'); +const { now } = require('internal/perf/utils'); const errorCodes = require('internal/errors').codes; const { @@ -504,12 +504,6 @@ function eventLoopUtilization(util1, util2) { return { idle: idle_delta, active: active_delta, utilization }; } -// Duplicate code from performance.now() so don't need to require perf_hooks. 
-function now() { - const hr = process.hrtime(); - return (hr[0] * 1000 + hr[1] / 1e6) - timeOrigin; -} - module.exports = { ownsProcessState, isMainThread, diff --git a/lib/perf_hooks.js b/lib/perf_hooks.js index a92a040f2de839..339d3ca4ff0ab4 100644 --- a/lib/perf_hooks.js +++ b/lib/perf_hooks.js @@ -1,126 +1,23 @@ 'use strict'; const { - ObjectDefineProperties, ObjectDefineProperty, - ObjectSetPrototypeOf, - TypeError, } = primordials; const { - timeOriginTimestamp, constants, } = internalBinding('performance'); -const { - EventTarget, -} = require('internal/event_target'); - -const { - PerformanceEntry, - now, -} = require('internal/perf/perf'); +const { PerformanceEntry } = require('internal/perf/performance_entry'); const { PerformanceObserver } = require('internal/perf/observe'); - -const { - PerformanceMark, - mark, - measure, - clearMarks, -} = require('internal/perf/usertiming'); +const { PerformanceMark } = require('internal/perf/usertiming'); +const { InternalPerformance } = require('internal/perf/performance'); const { createHistogram } = require('internal/histogram'); -const eventLoopUtilization = require('internal/perf/event_loop_utilization'); const monitorEventLoopDelay = require('internal/perf/event_loop_delay'); -const nodeTiming = require('internal/perf/nodetiming'); -const timerify = require('internal/perf/timerify'); -const { customInspectSymbol: kInspect } = require('internal/util'); -const { inspect } = require('util'); - -class Performance extends EventTarget { - constructor() { - // eslint-disable-next-line no-restricted-syntax - throw new TypeError('Illegal constructor'); - } - - [kInspect](depth, options) { - if (depth < 0) return this; - - const opts = { - ...options, - depth: options.depth == null ? 
null : options.depth - 1 - }; - - return `Performance ${inspect({ - nodeTiming: this.nodeTiming, - timeOrigin: this.timeOrigin, - }, opts)}`; - } - -} - -function toJSON() { - return { - nodeTiming: this.nodeTiming, - timeOrigin: this.timeOrigin, - eventLoopUtilization: this.eventLoopUtilization() - }; -} - -class InternalPerformance extends EventTarget {} -InternalPerformance.prototype.constructor = Performance.prototype.constructor; -ObjectSetPrototypeOf(InternalPerformance.prototype, Performance.prototype); - -ObjectDefineProperties(Performance.prototype, { - clearMarks: { - configurable: true, - enumerable: false, - value: clearMarks, - }, - eventLoopUtilization: { - configurable: true, - enumerable: false, - value: eventLoopUtilization, - }, - mark: { - configurable: true, - enumerable: false, - value: mark, - }, - measure: { - configurable: true, - enumerable: false, - value: measure, - }, - nodeTiming: { - configurable: true, - enumerable: false, - value: nodeTiming, - }, - now: { - configurable: true, - enumerable: false, - value: now, - }, - timerify: { - configurable: true, - enumerable: false, - value: timerify, - }, - timeOrigin: { - configurable: true, - enumerable: true, - value: timeOriginTimestamp, - }, - toJSON: { - configurable: true, - enumerable: true, - value: toJSON, - } -}); module.exports = { PerformanceEntry, diff --git a/src/node_perf.cc b/src/node_perf.cc index 8c5778ecb10bb0..41c93ea1640e5e 100644 --- a/src/node_perf.cc +++ b/src/node_perf.cc @@ -274,6 +274,15 @@ void ELDHistogram::OnInterval() { "stddev", histogram()->Stddev()); } +void GetTimeOrigin(const FunctionCallbackInfo& args) { + args.GetReturnValue().Set(Number::New(args.GetIsolate(), timeOrigin / 1e6)); +} + +void GetTimeOriginTimeStamp(const FunctionCallbackInfo& args) { + args.GetReturnValue().Set( + Number::New(args.GetIsolate(), timeOriginTimestamp / MICROS_PER_MILLIS)); +} + void Initialize(Local target, Local unused, Local context, @@ -308,6 +317,8 @@ void 
Initialize(Local target, RemoveGarbageCollectionTracking); env->SetMethod(target, "notify", Notify); env->SetMethod(target, "loopIdleTime", LoopIdleTime); + env->SetMethod(target, "getTimeOrigin", GetTimeOrigin); + env->SetMethod(target, "getTimeOriginTimestamp", GetTimeOriginTimeStamp); Local constants = Object::New(isolate); @@ -344,17 +355,6 @@ void Initialize(Local target, PropertyAttribute attr = static_cast(ReadOnly | DontDelete); - target->DefineOwnProperty(context, - FIXED_ONE_BYTE_STRING(isolate, "timeOrigin"), - Number::New(isolate, timeOrigin / 1e6), - attr).ToChecked(); - - target->DefineOwnProperty( - context, - FIXED_ONE_BYTE_STRING(isolate, "timeOriginTimestamp"), - Number::New(isolate, timeOriginTimestamp / MICROS_PER_MILLIS), - attr).ToChecked(); - target->DefineOwnProperty(context, env->constants_string(), constants, diff --git a/test/parallel/test-bootstrap-modules.js b/test/parallel/test-bootstrap-modules.js index f18f85728d36e3..5cf9b8df2a38e6 100644 --- a/test/parallel/test-bootstrap-modules.js +++ b/test/parallel/test-bootstrap-modules.js @@ -23,6 +23,7 @@ const expectedModules = new Set([ 'Internal Binding module_wrap', 'Internal Binding native_module', 'Internal Binding options', + 'Internal Binding performance', 'Internal Binding process_methods', 'Internal Binding report', 'Internal Binding serdes', @@ -60,6 +61,7 @@ const expectedModules = new Set([ 'NativeModule internal/fs/rimraf', 'NativeModule internal/fs/watchers', 'NativeModule internal/heap_utils', + 'NativeModule internal/histogram', 'NativeModule internal/idna', 'NativeModule internal/linkedlist', 'NativeModule internal/modules/run_main', @@ -77,6 +79,14 @@ const expectedModules = new Set([ 'NativeModule internal/modules/esm/translators', 'NativeModule internal/process/esm_loader', 'NativeModule internal/options', + 'NativeModule internal/perf/event_loop_utilization', + 'NativeModule internal/perf/nodetiming', + 'NativeModule internal/perf/observe', + 'NativeModule 
internal/perf/performance', + 'NativeModule internal/perf/performance_entry', + 'NativeModule internal/perf/timerify', + 'NativeModule internal/perf/usertiming', + 'NativeModule internal/perf/utils', 'NativeModule internal/priority_queue', 'NativeModule internal/process/execution', 'NativeModule internal/process/per_thread', From 6e46eb186ce891c6aa576f10fd3a229d9d15cb89 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Wed, 9 Jun 2021 00:19:04 +0800 Subject: [PATCH 022/133] bootstrap: support perf hooks in snapshot PR-URL: https://github.com/nodejs/node/pull/38971 Refs: https://github.com/nodejs/node/issues/35711 Reviewed-By: James M Snell --- src/histogram.cc | 33 ++++++++++++++++++++++++++++++++- src/histogram.h | 5 +++++ src/node_external_reference.h | 1 + src/node_perf.cc | 25 +++++++++++++++++++++++-- src/node_perf.h | 7 +++++-- 5 files changed, 66 insertions(+), 5 deletions(-) diff --git a/src/histogram.cc b/src/histogram.cc index d21cf2883a0ca8..6fbb0eda6c0792 100644 --- a/src/histogram.cc +++ b/src/histogram.cc @@ -1,8 +1,10 @@ #include "histogram.h" // NOLINT(build/include_inline) -#include "histogram-inl.h" #include "base_object-inl.h" +#include "histogram-inl.h" #include "memory_tracker-inl.h" #include "node_errors.h" +#include "node_external_reference.h" + namespace node { using v8::BigInt; @@ -197,6 +199,21 @@ Local HistogramBase::GetConstructorTemplate( return tmpl; } +void HistogramBase::RegisterExternalReferences( + ExternalReferenceRegistry* registry) { + registry->Register(New); + registry->Register(GetExceeds); + registry->Register(GetMin); + registry->Register(GetMax); + registry->Register(GetMean); + registry->Register(GetStddev); + registry->Register(GetPercentile); + registry->Register(GetPercentiles); + registry->Register(DoReset); + registry->Register(Record); + registry->Register(RecordDelta); +} + void HistogramBase::Initialize(Environment* env, Local target) { env->SetConstructorFunction(target, "Histogram", GetConstructorTemplate(env)); 
} @@ -240,6 +257,20 @@ Local IntervalHistogram::GetConstructorTemplate( return tmpl; } +void IntervalHistogram::RegisterExternalReferences( + ExternalReferenceRegistry* registry) { + registry->Register(GetExceeds); + registry->Register(GetMin); + registry->Register(GetMax); + registry->Register(GetMean); + registry->Register(GetStddev); + registry->Register(GetPercentile); + registry->Register(GetPercentiles); + registry->Register(DoReset); + registry->Register(Start); + registry->Register(Stop); +} + IntervalHistogram::IntervalHistogram( Environment* env, Local wrap, diff --git a/src/histogram.h b/src/histogram.h index 8c164f54cfd9ed..00b4f7796f8be6 100644 --- a/src/histogram.h +++ b/src/histogram.h @@ -18,6 +18,8 @@ namespace node { +class ExternalReferenceRegistry; + constexpr int kDefaultHistogramFigures = 3; class Histogram : public MemoryRetainer { @@ -78,6 +80,7 @@ class HistogramBase : public BaseObject, public HistogramImpl { static v8::Local GetConstructorTemplate( Environment* env); static void Initialize(Environment* env, v8::Local target); + static void RegisterExternalReferences(ExternalReferenceRegistry* registry); static BaseObjectPtr Create( Environment* env, @@ -154,6 +157,8 @@ class IntervalHistogram : public HandleWrap, public HistogramImpl { RESET }; + static void RegisterExternalReferences(ExternalReferenceRegistry* registry); + static v8::Local GetConstructorTemplate( Environment* env); diff --git a/src/node_external_reference.h b/src/node_external_reference.h index 2d6bc34322fd61..094558783f770e 100644 --- a/src/node_external_reference.h +++ b/src/node_external_reference.h @@ -61,6 +61,7 @@ class ExternalReferenceRegistry { V(heap_utils) \ V(messaging) \ V(native_module) \ + V(performance) \ V(process_methods) \ V(process_object) \ V(task_queue) \ diff --git a/src/node_perf.cc b/src/node_perf.cc index 41c93ea1640e5e..acbb0e0d902c3e 100644 --- a/src/node_perf.cc +++ b/src/node_perf.cc @@ -1,10 +1,11 @@ +#include "node_perf.h" #include 
"aliased_buffer.h" #include "env-inl.h" #include "histogram-inl.h" #include "memory_tracker-inl.h" -#include "node_internals.h" -#include "node_perf.h" #include "node_buffer.h" +#include "node_external_reference.h" +#include "node_internals.h" #include "node_process-inl.h" #include "util-inl.h" @@ -250,6 +251,12 @@ void ELDHistogram::Initialize(Environment* env, Local target) { env->SetConstructorFunction(target, "ELDHistogram", tmpl); } +void ELDHistogram::RegisterExternalReferences( + ExternalReferenceRegistry* registry) { + registry->Register(New); + IntervalHistogram::RegisterExternalReferences(registry); +} + ELDHistogram::ELDHistogram( Environment* env, Local wrap, @@ -364,7 +371,21 @@ void Initialize(Local target, ELDHistogram::Initialize(env, target); } +void RegisterExternalReferences(ExternalReferenceRegistry* registry) { + registry->Register(MarkMilestone); + registry->Register(SetupPerformanceObservers); + registry->Register(InstallGarbageCollectionTracking); + registry->Register(RemoveGarbageCollectionTracking); + registry->Register(Notify); + registry->Register(LoopIdleTime); + registry->Register(GetTimeOrigin); + registry->Register(GetTimeOriginTimeStamp); + HistogramBase::RegisterExternalReferences(registry); + ELDHistogram::RegisterExternalReferences(registry); +} } // namespace performance } // namespace node NODE_MODULE_CONTEXT_AWARE_INTERNAL(performance, node::performance::Initialize) +NODE_MODULE_EXTERNAL_REFERENCE(performance, + node::performance::RegisterExternalReferences) diff --git a/src/node_perf.h b/src/node_perf.h index 33cf9f2ec651bf..64913ab9de7c61 100644 --- a/src/node_perf.h +++ b/src/node_perf.h @@ -3,10 +3,11 @@ #if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS -#include "node.h" -#include "node_perf_common.h" #include "base_object-inl.h" #include "histogram.h" +#include "node.h" +#include "node_internals.h" +#include "node_perf_common.h" #include "v8.h" #include "uv.h" @@ -16,6 +17,7 @@ namespace node { class Environment; 
+class ExternalReferenceRegistry; namespace performance { @@ -160,6 +162,7 @@ using GCPerformanceEntry = PerformanceEntry; class ELDHistogram : public IntervalHistogram { public: + static void RegisterExternalReferences(ExternalReferenceRegistry* registry); static void Initialize(Environment* env, v8::Local target); static void New(const v8::FunctionCallbackInfo& args); From aafa08d7b942166c8a55e2a871b5f8d3c91b4dd3 Mon Sep 17 00:00:00 2001 From: Joyee Cheung Date: Wed, 9 Jun 2021 00:28:08 +0800 Subject: [PATCH 023/133] bootstrap: load perf_hooks eagerly during bootstrap PR-URL: https://github.com/nodejs/node/pull/38971 Refs: https://github.com/nodejs/node/issues/35711 Reviewed-By: James M Snell --- lib/internal/bootstrap/node.js | 23 +++++++---------------- lib/internal/event_target.js | 10 ++-------- test/parallel/test-bootstrap-modules.js | 2 ++ 3 files changed, 11 insertions(+), 24 deletions(-) diff --git a/lib/internal/bootstrap/node.js b/lib/internal/bootstrap/node.js index 863d4ef5608bce..58f7396990dddb 100644 --- a/lib/internal/bootstrap/node.js +++ b/lib/internal/bootstrap/node.js @@ -241,10 +241,9 @@ if (!config.noBrowserGlobals) { defineOperation(globalThis, 'queueMicrotask', queueMicrotask); - defineLazyGlobal(globalThis, 'performance', () => { - const { performance } = require('perf_hooks'); - return performance; - }); + // https://www.w3.org/TR/hr-time-2/#the-performance-attribute + defineReplacableAttribute(globalThis, 'performance', + require('perf_hooks').performance); // Non-standard extensions: defineOperation(globalThis, 'clearImmediate', timers.clearImmediate); @@ -494,20 +493,12 @@ function defineOperation(target, name, method) { }); } -function defineLazyGlobal(target, name, loader) { - let value; - let overridden = false; +// https://heycam.github.io/webidl/#Replaceable +function defineReplacableAttribute(target, name, value) { ObjectDefineProperty(target, name, { + writable: true, enumerable: true, configurable: true, - get() { - if (value 
=== undefined && !overridden) - value = loader(); - return value; - }, - set(val) { - value = val; - overridden = true; - } + value, }); } diff --git a/lib/internal/event_target.js b/lib/internal/event_target.js index 026746825b7767..825e1e8b2597ab 100644 --- a/lib/internal/event_target.js +++ b/lib/internal/event_target.js @@ -59,13 +59,7 @@ const kRemoveListener = Symbol('kRemoveListener'); const kIsNodeStyleListener = Symbol('kIsNodeStyleListener'); const kTrustEvent = Symbol('kTrustEvent'); -// Lazy load perf_hooks to avoid the additional overhead on startup -let perf_hooks; -function lazyNow() { - if (perf_hooks === undefined) - perf_hooks = require('perf_hooks'); - return perf_hooks.performance.now(); -} +const { now } = require('internal/perf/utils'); // TODO(joyeecheung): V8 snapshot does not support instance member // initializers for now: @@ -98,7 +92,7 @@ class Event { this[kComposed] = !!composed; this[kType] = `${type}`; this[kDefaultPrevented] = false; - this[kTimestamp] = lazyNow(); + this[kTimestamp] = now(); this[kPropagationStopped] = false; if (options?.[kTrustEvent]) { isTrustedSet.add(this); diff --git a/test/parallel/test-bootstrap-modules.js b/test/parallel/test-bootstrap-modules.js index 5cf9b8df2a38e6..0ca00f31adce8c 100644 --- a/test/parallel/test-bootstrap-modules.js +++ b/test/parallel/test-bootstrap-modules.js @@ -79,6 +79,7 @@ const expectedModules = new Set([ 'NativeModule internal/modules/esm/translators', 'NativeModule internal/process/esm_loader', 'NativeModule internal/options', + 'NativeModule internal/perf/event_loop_delay', 'NativeModule internal/perf/event_loop_utilization', 'NativeModule internal/perf/nodetiming', 'NativeModule internal/perf/observe', @@ -126,6 +127,7 @@ const expectedModules = new Set([ 'NativeModule internal/blob', 'NativeModule async_hooks', 'NativeModule path', + 'NativeModule perf_hooks', 'NativeModule querystring', 'NativeModule stream', 'NativeModule stream/promises', From 
fff21a4afbc2debf4f4a6bcaba308ce4a9609f46 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Fri, 25 Jun 2021 22:11:59 -0700 Subject: [PATCH 024/133] test: replace "inspector-cli" with "debugger" MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When I moved node-inspect into core, I called a lot of things `inspector-cli` that really should have been `debugger`. This is the last of them to be renamed. PR-URL: https://github.com/nodejs/node/pull/39156 Reviewed-By: Michaël Zasso Reviewed-By: Antoine du Hamel Reviewed-By: Colin Ihrig --- test/fixtures/{inspector-cli => debugger}/alive.js | 0 test/fixtures/{inspector-cli => debugger}/backtrace.js | 0 test/fixtures/{inspector-cli => debugger}/break.js | 0 test/fixtures/{inspector-cli => debugger}/cjs/index.js | 0 test/fixtures/{inspector-cli => debugger}/cjs/other.js | 0 test/fixtures/{inspector-cli => debugger}/empty.js | 0 test/fixtures/{inspector-cli => debugger}/exceptions.js | 0 test/fixtures/{inspector-cli => debugger}/three-lines.js | 0 test/fixtures/{inspector-cli => debugger}/use-strict.js | 0 test/sequential/test-debugger-address.js | 2 +- test/sequential/test-debugger-auto-resume.js | 2 +- test/sequential/test-debugger-backtrace.js | 2 +- test/sequential/test-debugger-break.js | 2 +- test/sequential/test-debugger-clear-breakpoints.js | 2 +- test/sequential/test-debugger-custom-port.js | 2 +- test/sequential/test-debugger-exceptions.js | 2 +- test/sequential/test-debugger-exec-scope.js | 2 +- test/sequential/test-debugger-exec.js | 2 +- test/sequential/test-debugger-heap-profiler.js | 2 +- test/sequential/test-debugger-help.js | 2 +- test/sequential/test-debugger-invalid-args.js | 2 +- test/sequential/test-debugger-launch.js | 2 +- test/sequential/test-debugger-low-level.js | 4 ++-- test/sequential/test-debugger-pid.js | 2 +- test/sequential/test-debugger-preserve-breaks.js | 2 +- test/sequential/test-debugger-profile.js | 2 +- 
.../test-debugger-random-port-with-inspect-port.js | 2 +- test/sequential/test-debugger-random-port.js | 2 +- test/sequential/test-debugger-run-after-quit-restart.js | 2 +- test/sequential/test-debugger-sb-before-load.js | 4 ++-- test/sequential/test-debugger-scripts.js | 6 +++--- test/sequential/test-debugger-use-strict.js | 2 +- test/sequential/test-debugger-watchers.js | 2 +- 33 files changed, 28 insertions(+), 28 deletions(-) rename test/fixtures/{inspector-cli => debugger}/alive.js (100%) rename test/fixtures/{inspector-cli => debugger}/backtrace.js (100%) rename test/fixtures/{inspector-cli => debugger}/break.js (100%) rename test/fixtures/{inspector-cli => debugger}/cjs/index.js (100%) rename test/fixtures/{inspector-cli => debugger}/cjs/other.js (100%) rename test/fixtures/{inspector-cli => debugger}/empty.js (100%) rename test/fixtures/{inspector-cli => debugger}/exceptions.js (100%) rename test/fixtures/{inspector-cli => debugger}/three-lines.js (100%) rename test/fixtures/{inspector-cli => debugger}/use-strict.js (100%) diff --git a/test/fixtures/inspector-cli/alive.js b/test/fixtures/debugger/alive.js similarity index 100% rename from test/fixtures/inspector-cli/alive.js rename to test/fixtures/debugger/alive.js diff --git a/test/fixtures/inspector-cli/backtrace.js b/test/fixtures/debugger/backtrace.js similarity index 100% rename from test/fixtures/inspector-cli/backtrace.js rename to test/fixtures/debugger/backtrace.js diff --git a/test/fixtures/inspector-cli/break.js b/test/fixtures/debugger/break.js similarity index 100% rename from test/fixtures/inspector-cli/break.js rename to test/fixtures/debugger/break.js diff --git a/test/fixtures/inspector-cli/cjs/index.js b/test/fixtures/debugger/cjs/index.js similarity index 100% rename from test/fixtures/inspector-cli/cjs/index.js rename to test/fixtures/debugger/cjs/index.js diff --git a/test/fixtures/inspector-cli/cjs/other.js b/test/fixtures/debugger/cjs/other.js similarity index 100% rename from 
test/fixtures/inspector-cli/cjs/other.js rename to test/fixtures/debugger/cjs/other.js diff --git a/test/fixtures/inspector-cli/empty.js b/test/fixtures/debugger/empty.js similarity index 100% rename from test/fixtures/inspector-cli/empty.js rename to test/fixtures/debugger/empty.js diff --git a/test/fixtures/inspector-cli/exceptions.js b/test/fixtures/debugger/exceptions.js similarity index 100% rename from test/fixtures/inspector-cli/exceptions.js rename to test/fixtures/debugger/exceptions.js diff --git a/test/fixtures/inspector-cli/three-lines.js b/test/fixtures/debugger/three-lines.js similarity index 100% rename from test/fixtures/inspector-cli/three-lines.js rename to test/fixtures/debugger/three-lines.js diff --git a/test/fixtures/inspector-cli/use-strict.js b/test/fixtures/debugger/use-strict.js similarity index 100% rename from test/fixtures/inspector-cli/use-strict.js rename to test/fixtures/debugger/use-strict.js diff --git a/test/sequential/test-debugger-address.js b/test/sequential/test-debugger-address.js index ff31747016c2d4..95dd1c6e3f8283 100644 --- a/test/sequential/test-debugger-address.js +++ b/test/sequential/test-debugger-address.js @@ -37,7 +37,7 @@ function launchTarget(...args) { } { - const script = fixtures.path('inspector-cli/alive.js'); + const script = fixtures.path('debugger/alive.js'); let cli = null; let target = null; diff --git a/test/sequential/test-debugger-auto-resume.js b/test/sequential/test-debugger-auto-resume.js index 9a210176a51705..8a25f5fc804e1a 100644 --- a/test/sequential/test-debugger-auto-resume.js +++ b/test/sequential/test-debugger-auto-resume.js @@ -14,7 +14,7 @@ addLibraryPath(process.env); // Auto-resume on start if the environment variable is defined. 
{ - const scriptFullPath = fixtures.path('inspector-cli', 'break.js'); + const scriptFullPath = fixtures.path('debugger', 'break.js'); const script = path.relative(process.cwd(), scriptFullPath); const env = { ...process.env }; diff --git a/test/sequential/test-debugger-backtrace.js b/test/sequential/test-debugger-backtrace.js index baf2cfe8b58673..f362e98068f15e 100644 --- a/test/sequential/test-debugger-backtrace.js +++ b/test/sequential/test-debugger-backtrace.js @@ -11,7 +11,7 @@ const path = require('path'); // Display and navigate backtrace. { - const scriptFullPath = fixtures.path('inspector-cli', 'backtrace.js'); + const scriptFullPath = fixtures.path('debugger', 'backtrace.js'); const script = path.relative(process.cwd(), scriptFullPath); const cli = startCLI([script]); diff --git a/test/sequential/test-debugger-break.js b/test/sequential/test-debugger-break.js index ec6f354a7e7169..fdfe9bd3c40064 100644 --- a/test/sequential/test-debugger-break.js +++ b/test/sequential/test-debugger-break.js @@ -11,7 +11,7 @@ const path = require('path'); // Stepping through breakpoints. 
{ - const scriptFullPath = fixtures.path('inspector-cli', 'break.js'); + const scriptFullPath = fixtures.path('debugger', 'break.js'); const script = path.relative(process.cwd(), scriptFullPath); const cli = startCLI([script]); diff --git a/test/sequential/test-debugger-clear-breakpoints.js b/test/sequential/test-debugger-clear-breakpoints.js index 0b9f3b113d5b2f..91349e105a1160 100644 --- a/test/sequential/test-debugger-clear-breakpoints.js +++ b/test/sequential/test-debugger-clear-breakpoints.js @@ -11,7 +11,7 @@ const path = require('path'); // clearBreakpoint { - const scriptFullPath = fixtures.path('inspector-cli', 'break.js'); + const scriptFullPath = fixtures.path('debugger', 'break.js'); const script = path.relative(process.cwd(), scriptFullPath); const cli = startCLI([script]); diff --git a/test/sequential/test-debugger-custom-port.js b/test/sequential/test-debugger-custom-port.js index 85d7e4154a16fe..e6cee10ffa53b5 100644 --- a/test/sequential/test-debugger-custom-port.js +++ b/test/sequential/test-debugger-custom-port.js @@ -10,7 +10,7 @@ const assert = require('assert'); // Custom port. { - const script = fixtures.path('inspector-cli', 'three-lines.js'); + const script = fixtures.path('debugger', 'three-lines.js'); const cli = startCLI([`--port=${common.PORT}`, script]); diff --git a/test/sequential/test-debugger-exceptions.js b/test/sequential/test-debugger-exceptions.js index dc579d0197303a..9b1163316268c7 100644 --- a/test/sequential/test-debugger-exceptions.js +++ b/test/sequential/test-debugger-exceptions.js @@ -11,7 +11,7 @@ const path = require('path'); // Break on (uncaught) exceptions. 
{ - const scriptFullPath = fixtures.path('inspector-cli', 'exceptions.js'); + const scriptFullPath = fixtures.path('debugger', 'exceptions.js'); const script = path.relative(process.cwd(), scriptFullPath); const cli = startCLI([script]); diff --git a/test/sequential/test-debugger-exec-scope.js b/test/sequential/test-debugger-exec-scope.js index 23e376815319c8..9e5d2ac7ebaeeb 100644 --- a/test/sequential/test-debugger-exec-scope.js +++ b/test/sequential/test-debugger-exec-scope.js @@ -10,7 +10,7 @@ const assert = require('assert'); // exec .scope { - const cli = startCLI([fixtures.path('inspector-cli/backtrace.js')]); + const cli = startCLI([fixtures.path('debugger/backtrace.js')]); function onFatal(error) { cli.quit(); diff --git a/test/sequential/test-debugger-exec.js b/test/sequential/test-debugger-exec.js index e1de786ab21302..68a9b37d09d6aa 100644 --- a/test/sequential/test-debugger-exec.js +++ b/test/sequential/test-debugger-exec.js @@ -10,7 +10,7 @@ const assert = require('assert'); { - const cli = startCLI([fixtures.path('inspector-cli/alive.js')]); + const cli = startCLI([fixtures.path('debugger/alive.js')]); function onFatal(error) { cli.quit(); diff --git a/test/sequential/test-debugger-heap-profiler.js b/test/sequential/test-debugger-heap-profiler.js index 8602b8f8d11268..0f0fdc22fbb3b4 100644 --- a/test/sequential/test-debugger-heap-profiler.js +++ b/test/sequential/test-debugger-heap-profiler.js @@ -20,7 +20,7 @@ const filename = 'node.heapsnapshot'; // Heap profiler take snapshot. 
{ - const cli = startCLI([fixtures.path('inspector-cli/empty.js')]); + const cli = startCLI([fixtures.path('debugger/empty.js')]); function onFatal(error) { cli.quit(); diff --git a/test/sequential/test-debugger-help.js b/test/sequential/test-debugger-help.js index 78a48b6f9ab159..e24f873212b589 100644 --- a/test/sequential/test-debugger-help.js +++ b/test/sequential/test-debugger-help.js @@ -9,7 +9,7 @@ const startCLI = require('../common/debugger'); const assert = require('assert'); { - const cli = startCLI([fixtures.path('inspector-cli/empty.js')]); + const cli = startCLI([fixtures.path('debugger/empty.js')]); function onFatal(error) { cli.quit(); diff --git a/test/sequential/test-debugger-invalid-args.js b/test/sequential/test-debugger-invalid-args.js index 327b076d78cfb8..05c27b4a0ee20e 100644 --- a/test/sequential/test-debugger-invalid-args.js +++ b/test/sequential/test-debugger-invalid-args.js @@ -41,7 +41,7 @@ const { createServer } = require('net'); }); try { - const script = fixtures.path('inspector-cli', 'three-lines.js'); + const script = fixtures.path('debugger', 'three-lines.js'); const cli = startCLI([`--port=${port}`, script]); const code = await cli.quit(); diff --git a/test/sequential/test-debugger-launch.js b/test/sequential/test-debugger-launch.js index e501a6b6123c47..3bfe541ecca05c 100644 --- a/test/sequential/test-debugger-launch.js +++ b/test/sequential/test-debugger-launch.js @@ -9,7 +9,7 @@ const startCLI = require('../common/debugger'); const assert = require('assert'); { - const script = fixtures.path('inspector-cli', 'three-lines.js'); + const script = fixtures.path('debugger', 'three-lines.js'); const cli = startCLI([script]); cli.waitForInitialBreak() diff --git a/test/sequential/test-debugger-low-level.js b/test/sequential/test-debugger-low-level.js index 2613e4a4bbc2a3..f6d97f2dfe153d 100644 --- a/test/sequential/test-debugger-low-level.js +++ b/test/sequential/test-debugger-low-level.js @@ -9,8 +9,8 @@ const assert = 
require('assert'); // Debugger agent direct access. { - const cli = startCLI([fixtures.path('inspector-cli/three-lines.js')]); - const scriptPattern = /^\* (\d+): \S+inspector-cli(?:\/|\\)three-lines\.js/m; + const cli = startCLI([fixtures.path('debugger/three-lines.js')]); + const scriptPattern = /^\* (\d+): \S+debugger(?:\/|\\)three-lines\.js/m; function onFatal(error) { cli.quit(); diff --git a/test/sequential/test-debugger-pid.js b/test/sequential/test-debugger-pid.js index 97de9f40369d2d..402c1f86dd4ed9 100644 --- a/test/sequential/test-debugger-pid.js +++ b/test/sequential/test-debugger-pid.js @@ -16,7 +16,7 @@ function launchTarget(...args) { } { - const script = fixtures.path('inspector-cli', 'alive.js'); + const script = fixtures.path('debugger', 'alive.js'); let cli = null; let target = null; diff --git a/test/sequential/test-debugger-preserve-breaks.js b/test/sequential/test-debugger-preserve-breaks.js index 6863aaa45ae1b6..a52adf606a1cff 100644 --- a/test/sequential/test-debugger-preserve-breaks.js +++ b/test/sequential/test-debugger-preserve-breaks.js @@ -11,7 +11,7 @@ const path = require('path'); // Run after quit. { - const scriptFullPath = fixtures.path('inspector-cli', 'three-lines.js'); + const scriptFullPath = fixtures.path('debugger', 'three-lines.js'); const script = path.relative(process.cwd(), scriptFullPath); const cli = startCLI([script]); diff --git a/test/sequential/test-debugger-profile.js b/test/sequential/test-debugger-profile.js index 39b57448ffe628..992c6f71c00775 100644 --- a/test/sequential/test-debugger-profile.js +++ b/test/sequential/test-debugger-profile.js @@ -14,7 +14,7 @@ function delay(ms) { // Profiles. 
{ - const cli = startCLI([fixtures.path('inspector-cli/empty.js')]); + const cli = startCLI([fixtures.path('debugger/empty.js')]); function onFatal(error) { cli.quit(); diff --git a/test/sequential/test-debugger-random-port-with-inspect-port.js b/test/sequential/test-debugger-random-port-with-inspect-port.js index 83c2b68b014b48..5617e130f02585 100644 --- a/test/sequential/test-debugger-random-port-with-inspect-port.js +++ b/test/sequential/test-debugger-random-port-with-inspect-port.js @@ -10,7 +10,7 @@ const assert = require('assert'); // Random port with --inspect-port=0. { - const script = fixtures.path('inspector-cli', 'three-lines.js'); + const script = fixtures.path('debugger', 'three-lines.js'); const cli = startCLI(['--inspect-port=0', script]); diff --git a/test/sequential/test-debugger-random-port.js b/test/sequential/test-debugger-random-port.js index 22de14661b8f64..da8656cf1c7115 100644 --- a/test/sequential/test-debugger-random-port.js +++ b/test/sequential/test-debugger-random-port.js @@ -10,7 +10,7 @@ const assert = require('assert'); // Random port. { - const script = fixtures.path('inspector-cli', 'three-lines.js'); + const script = fixtures.path('debugger', 'three-lines.js'); const cli = startCLI(['--port=0', script]); diff --git a/test/sequential/test-debugger-run-after-quit-restart.js b/test/sequential/test-debugger-run-after-quit-restart.js index 7f31d467198f23..a9da07dcdff8bd 100644 --- a/test/sequential/test-debugger-run-after-quit-restart.js +++ b/test/sequential/test-debugger-run-after-quit-restart.js @@ -11,7 +11,7 @@ const path = require('path'); // Run after quit/restart. 
{ - const scriptFullPath = fixtures.path('inspector-cli', 'three-lines.js'); + const scriptFullPath = fixtures.path('debugger', 'three-lines.js'); const script = path.relative(process.cwd(), scriptFullPath); const cli = startCLI([script]); diff --git a/test/sequential/test-debugger-sb-before-load.js b/test/sequential/test-debugger-sb-before-load.js index c99fb664a70768..586687800e8e90 100644 --- a/test/sequential/test-debugger-sb-before-load.js +++ b/test/sequential/test-debugger-sb-before-load.js @@ -11,10 +11,10 @@ const path = require('path'); // Using sb before loading file. { - const scriptFullPath = fixtures.path('inspector-cli', 'cjs', 'index.js'); + const scriptFullPath = fixtures.path('debugger', 'cjs', 'index.js'); const script = path.relative(process.cwd(), scriptFullPath); - const otherScriptFullPath = fixtures.path('inspector-cli', 'cjs', 'other.js'); + const otherScriptFullPath = fixtures.path('debugger', 'cjs', 'other.js'); const otherScript = path.relative(process.cwd(), otherScriptFullPath); const cli = startCLI([script]); diff --git a/test/sequential/test-debugger-scripts.js b/test/sequential/test-debugger-scripts.js index 893420d2aa4c2d..c6d4e67920921d 100644 --- a/test/sequential/test-debugger-scripts.js +++ b/test/sequential/test-debugger-scripts.js @@ -10,7 +10,7 @@ const assert = require('assert'); // List scripts. 
{ - const script = fixtures.path('inspector-cli', 'three-lines.js'); + const script = fixtures.path('debugger', 'three-lines.js'); const cli = startCLI([script]); function onFatal(error) { @@ -24,7 +24,7 @@ const assert = require('assert'); .then(() => { assert.match( cli.output, - /^\* \d+: \S+inspector-cli(?:\/|\\)three-lines\.js/m, + /^\* \d+: \S+debugger(?:\/|\\)three-lines\.js/m, 'lists the user script'); assert.doesNotMatch( cli.output, @@ -35,7 +35,7 @@ const assert = require('assert'); .then(() => { assert.match( cli.output, - /\* \d+: \S+inspector-cli(?:\/|\\)three-lines\.js/, + /\* \d+: \S+debugger(?:\/|\\)three-lines\.js/, 'lists the user script'); assert.match( cli.output, diff --git a/test/sequential/test-debugger-use-strict.js b/test/sequential/test-debugger-use-strict.js index c5b46b4f00e839..ae82a9fc82352b 100644 --- a/test/sequential/test-debugger-use-strict.js +++ b/test/sequential/test-debugger-use-strict.js @@ -10,7 +10,7 @@ const assert = require('assert'); // Test for files that start with strict directive. { - const script = fixtures.path('inspector-cli', 'use-strict.js'); + const script = fixtures.path('debugger', 'use-strict.js'); const cli = startCLI([script]); function onFatal(error) { diff --git a/test/sequential/test-debugger-watchers.js b/test/sequential/test-debugger-watchers.js index e239e7ac74c216..e856132b74e28a 100644 --- a/test/sequential/test-debugger-watchers.js +++ b/test/sequential/test-debugger-watchers.js @@ -10,7 +10,7 @@ const assert = require('assert'); // Stepping through breakpoints. 
{ - const cli = startCLI([fixtures.path('inspector-cli/break.js')]); + const cli = startCLI([fixtures.path('debugger/break.js')]); function onFatal(error) { cli.quit(); From 74bb915178dd024767b4dbd5b3424e16dd308a73 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sat, 26 Jun 2021 00:24:48 -0700 Subject: [PATCH 025/133] doc: use repository instead of repo MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Stick with "repository" instead of using both "repository" and "repo". PR-URL: https://github.com/nodejs/node/pull/39157 Reviewed-By: Michaël Zasso Reviewed-By: Richard Lau Reviewed-By: Darshan Sen Reviewed-By: Colin Ihrig Reviewed-By: Luigi Pinca Reviewed-By: Harshitha K P --- doc/guides/collaborator-guide.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/guides/collaborator-guide.md b/doc/guides/collaborator-guide.md index ae860f6446eb6f..dae3b8579de07f 100644 --- a/doc/guides/collaborator-guide.md +++ b/doc/guides/collaborator-guide.md @@ -95,7 +95,7 @@ issues. If a user opens a security issue in the public repository: * Ask the user to submit a report through HackerOne as outlined in [SECURITY.md][]. -* Move the issue to the private repo called +* Move the issue to the private repository called [premature-disclosures](https://github.com/nodejs/premature-disclosures). * For any related pull requests, create an associated issue in the `premature-disclosures` repository. 
Add a copy of the patch for the @@ -552,7 +552,7 @@ Checkout proper target branch: $ git checkout master ``` -Update the tree (assumes your repo is set up as detailed in +Update the tree (assumes your repository is set up as detailed in [CONTRIBUTING.md](./contributing/pull-requests.md#step-1-fork)): ```text From ac05a0a8a3e5b0c094617bed597c9ae095fb69b8 Mon Sep 17 00:00:00 2001 From: bcoe Date: Sat, 12 Jun 2021 10:17:31 -0700 Subject: [PATCH 026/133] errors: don't throw TypeError on missing export MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Logic in module_job.js assumes detailed stack trace from node_errors.cc which is not populated when --enable-source-maps is set. Fixes #38790 PR-URL: https://github.com/nodejs/node/pull/39017 Reviewed-By: Antoine du Hamel Reviewed-By: Michaël Zasso Reviewed-By: Colin Ihrig Reviewed-By: Benjamin Gruenbaum Reviewed-By: James M Snell Reviewed-By: Michael Dawson --- lib/internal/modules/esm/module_job.js | 10 +++++++++- .../source-map/esm-export-missing-module.mjs | 0 .../esm-export-missing-module.mjs.map | 1 + .../source-map/esm-export-missing.mjs | 2 ++ .../source-map/esm-export-missing.mjs.map | 1 + .../fixtures/source-map/esm-export-missing.ts | 3 +++ test/parallel/test-source-map-enable.js | 19 +++++++++++++++++++ 7 files changed, 35 insertions(+), 1 deletion(-) create mode 100644 test/fixtures/source-map/esm-export-missing-module.mjs create mode 100644 test/fixtures/source-map/esm-export-missing-module.mjs.map create mode 100644 test/fixtures/source-map/esm-export-missing.mjs create mode 100644 test/fixtures/source-map/esm-export-missing.mjs.map create mode 100644 test/fixtures/source-map/esm-export-missing.ts diff --git a/lib/internal/modules/esm/module_job.js b/lib/internal/modules/esm/module_job.js index b899c233d45a09..0ef8ebdeb9245b 100644 --- a/lib/internal/modules/esm/module_job.js +++ b/lib/internal/modules/esm/module_job.js @@ -24,6 +24,9 @@ const { const { ModuleWrap } = 
internalBinding('module_wrap'); const { decorateErrorStack } = require('internal/util'); +const { + getSourceMapsEnabled, +} = require('internal/source_map/source_map_cache'); const assert = require('internal/assert'); const resolvedPromise = PromiseResolve(); @@ -122,7 +125,12 @@ class ModuleJob { } } catch (e) { decorateErrorStack(e); - if (StringPrototypeIncludes(e.message, + // TODO(@bcoe): Add source map support to exception that occurs as result + // of missing named export. This is currently not possible because + // stack trace originates in module_job, not the file itself. A hidden + // symbol with filename could be set in node_errors.cc to facilitate this. + if (!getSourceMapsEnabled() && + StringPrototypeIncludes(e.message, ' does not provide an export named')) { const splitStack = StringPrototypeSplit(e.stack, '\n'); const parentFileUrl = StringPrototypeReplace( diff --git a/test/fixtures/source-map/esm-export-missing-module.mjs b/test/fixtures/source-map/esm-export-missing-module.mjs new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/test/fixtures/source-map/esm-export-missing-module.mjs.map b/test/fixtures/source-map/esm-export-missing-module.mjs.map new file mode 100644 index 00000000000000..17417c928dc04d --- /dev/null +++ b/test/fixtures/source-map/esm-export-missing-module.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"esm-export-missing-module.esm","sourceRoot":"","sources":["./exm-export-missing-module.ts"],"names":[],"mappings":"AAEA,MAAM,UAAU,SAAS;AAEzB,CAAC"} \ No newline at end of file diff --git a/test/fixtures/source-map/esm-export-missing.mjs b/test/fixtures/source-map/esm-export-missing.mjs new file mode 100644 index 00000000000000..4bda755a86b389 --- /dev/null +++ b/test/fixtures/source-map/esm-export-missing.mjs @@ -0,0 +1,2 @@ +import { Something } from './esm-export-missing-module.mjs'; +//# sourceMappingURL=esm-export-missing.mjs.map diff --git a/test/fixtures/source-map/esm-export-missing.mjs.map 
b/test/fixtures/source-map/esm-export-missing.mjs.map new file mode 100644 index 00000000000000..2d1d482dc97083 --- /dev/null +++ b/test/fixtures/source-map/esm-export-missing.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"esm-export-missing.ts","sourceRoot":"","sources":["./esm-export-missing.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,SAAS,EAAE,MAAM,gBAAgB,CAAC;AAC3C,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC"} \ No newline at end of file diff --git a/test/fixtures/source-map/esm-export-missing.ts b/test/fixtures/source-map/esm-export-missing.ts new file mode 100644 index 00000000000000..14797c69feaebc --- /dev/null +++ b/test/fixtures/source-map/esm-export-missing.ts @@ -0,0 +1,3 @@ + +import { Something } from './exm-export-missing-module.mjs'; +console.info(Something); diff --git a/test/parallel/test-source-map-enable.js b/test/parallel/test-source-map-enable.js index e0c222d1a333bf..eab718813fc379 100644 --- a/test/parallel/test-source-map-enable.js +++ b/test/parallel/test-source-map-enable.js @@ -324,6 +324,25 @@ function nextdir() { assert.ok(sourceMap); } +// Does not throw TypeError when exception occurs as result of missing named +// export. +{ + const coverageDirectory = nextdir(); + const output = spawnSync(process.execPath, [ + '--enable-source-maps', + require.resolve('../fixtures/source-map/esm-export-missing.mjs'), + ], { env: { ...process.env, NODE_V8_COVERAGE: coverageDirectory } }); + const sourceMap = getSourceMapFromCache( + 'esm-export-missing.mjs', + coverageDirectory + ); + // Module loader error displayed. + assert.match(output.stderr.toString(), + /does not provide an export named 'Something'/); + // Source map should have been serialized. 
+ assert.ok(sourceMap); +} + function getSourceMapFromCache(fixtureFile, coverageDirectory) { const jsonFiles = fs.readdirSync(coverageDirectory); for (const jsonFile of jsonFiles) { From aec2744e14d65238fa68d2ce72de37a4d1afd18c Mon Sep 17 00:00:00 2001 From: Antoine du Hamel Date: Sun, 27 Jun 2021 09:03:27 +0200 Subject: [PATCH 027/133] doc: remove file name from self-reference links Refs: https://github.com/nodejs/remark-preset-lint-node/pull/188 PR-URL: https://github.com/nodejs/node/pull/39165 Reviewed-By: Rich Trott Reviewed-By: Darshan Sen Reviewed-By: Harshitha K P --- doc/api/fs.md | 2 +- doc/api/process.md | 4 ++-- doc/api/readline.md | 4 ++-- doc/api/timers.md | 14 +++++++------- doc/api/tty.md | 4 ++-- 5 files changed, 14 insertions(+), 14 deletions(-) diff --git a/doc/api/fs.md b/doc/api/fs.md index f5c2552bbb8cf3..1fbfacfae972b7 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -6720,7 +6720,7 @@ the file contents. [`fs.copyFile()`]: #fs_fs_copyfile_src_dest_mode_callback [`fs.createReadStream()`]: #fs_fs_createreadstream_path_options [`fs.createWriteStream()`]: #fs_fs_createwritestream_path_options -[`fs.exists()`]: fs.md#fs_fs_exists_path_callback +[`fs.exists()`]: #fs_fs_exists_path_callback [`fs.fstat()`]: #fs_fs_fstat_fd_options_callback [`fs.ftruncate()`]: #fs_fs_ftruncate_fd_len_callback [`fs.futimes()`]: #fs_fs_futimes_fd_atime_mtime_callback diff --git a/doc/api/process.md b/doc/api/process.md index 9dd5d50808b6ae..575e19f66cd051 100644 --- a/doc/api/process.md +++ b/doc/api/process.md @@ -3700,7 +3700,7 @@ cases: [`process.hrtime()`]: #process_process_hrtime_time [`process.hrtime.bigint()`]: #process_process_hrtime_bigint [`process.kill()`]: #process_process_kill_pid_signal -[`process.setUncaughtExceptionCaptureCallback()`]: process.md#process_process_setuncaughtexceptioncapturecallback_fn +[`process.setUncaughtExceptionCaptureCallback()`]: #process_process_setuncaughtexceptioncapturecallback_fn [`promise.catch()`]: 
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/catch [`queueMicrotask()`]: globals.md#globals_queuemicrotask_callback [`readable.read()`]: stream.md#stream_readable_read_size @@ -3710,7 +3710,7 @@ cases: [`v8.setFlagsFromString()`]: v8.md#v8_v8_setflagsfromstring_flags [debugger]: debugger.md [deprecation code]: deprecations.md -[note on process I/O]: process.md#process_a_note_on_process_i_o +[note on process I/O]: #process_a_note_on_process_i_o [process.cpuUsage]: #process_process_cpuusage_previousvalue [process_emit_warning]: #process_process_emitwarning_warning_type_code_ctor [process_warning]: #process_event_warning diff --git a/doc/api/readline.md b/doc/api/readline.md index 58c7209b7619f1..72265f803eca8e 100644 --- a/doc/api/readline.md +++ b/doc/api/readline.md @@ -971,8 +971,8 @@ const { createInterface } = require('readline'); [TTY]: tty.md [TTY keybindings]: #readline_tty_keybindings [Writable]: stream.md#stream_writable_streams -[`'SIGCONT'`]: readline.md#readline_event_sigcont -[`'SIGTSTP'`]: readline.md#readline_event_sigtstp +[`'SIGCONT'`]: #readline_event_sigcont +[`'SIGTSTP'`]: #readline_event_sigtstp [`'line'`]: #readline_event_line [`fs.ReadStream`]: fs.md#fs_class_fs_readstream [`process.stdin`]: process.md#process_process_stdin diff --git a/doc/api/timers.md b/doc/api/timers.md index 820e6a282aeb01..89f9ad312e33c4 100644 --- a/doc/api/timers.md +++ b/doc/api/timers.md @@ -472,12 +472,12 @@ const interval = 100; [Event Loop]: https://nodejs.org/en/docs/guides/event-loop-timers-and-nexttick/#setimmediate-vs-settimeout [`AbortController`]: globals.md#globals_class_abortcontroller [`TypeError`]: errors.md#errors_class_typeerror -[`clearImmediate()`]: timers.md#timers_clearimmediate_immediate -[`clearInterval()`]: timers.md#timers_clearinterval_timeout -[`clearTimeout()`]: timers.md#timers_cleartimeout_timeout -[`setImmediate()`]: timers.md#timers_setimmediate_callback_args -[`setInterval()`]: 
timers.md#timers_setinterval_callback_delay_args -[`setTimeout()`]: timers.md#timers_settimeout_callback_delay_args +[`clearImmediate()`]: #timers_clearimmediate_immediate +[`clearInterval()`]: #timers_clearinterval_timeout +[`clearTimeout()`]: #timers_cleartimeout_timeout +[`setImmediate()`]: #timers_setimmediate_callback_args +[`setInterval()`]: #timers_setinterval_callback_delay_args +[`setTimeout()`]: #timers_settimeout_callback_delay_args [`util.promisify()`]: util.md#util_util_promisify_original [`worker_threads`]: worker_threads.md -[primitive]: timers.md#timers_timeout_symbol_toprimitive +[primitive]: #timers_timeout_symbol_toprimitive diff --git a/doc/api/tty.md b/doc/api/tty.md index 3cf37ff9cb7cdf..df050c8ae9e96f 100644 --- a/doc/api/tty.md +++ b/doc/api/tty.md @@ -211,10 +211,10 @@ added: v0.7.7 * Returns: {number[]} -`writeStream.getWindowSize()` returns the size of the [TTY](tty.md) +`writeStream.getWindowSize()` returns the size of the TTY corresponding to this `WriteStream`. The array is of the type `[numColumns, numRows]` where `numColumns` and `numRows` represent the number -of columns and rows in the corresponding [TTY](tty.md). +of columns and rows in the corresponding TTY. ### `writeStream.hasColors([count][, env])` ` line comment\n this.skipLineComment(3)\n this.skipSpace()\n return this.nextToken()\n }\n return this.finishOp(tt.incDec, 2)\n }\n if (next === 61) return this.finishOp(tt.assign, 2)\n return this.finishOp(tt.plusMin, 1)\n}\n\npp.readToken_lt_gt = function(code) { // '<>'\n let next = this.input.charCodeAt(this.pos + 1)\n let size = 1\n if (next === code) {\n size = code === 62 && this.input.charCodeAt(this.pos + 2) === 62 ? 
3 : 2\n if (this.input.charCodeAt(this.pos + size) === 61) return this.finishOp(tt.assign, size + 1)\n return this.finishOp(tt.bitShift, size)\n }\n if (next === 33 && code === 60 && !this.inModule && this.input.charCodeAt(this.pos + 2) === 45 &&\n this.input.charCodeAt(this.pos + 3) === 45) {\n // `` line comment\n this.skipLineComment(3)\n this.skipSpace()\n return this.nextToken()\n }\n return this.finishOp(tt.incDec, 2)\n }\n if (next === 61) return this.finishOp(tt.assign, 2)\n return this.finishOp(tt.plusMin, 1)\n}\n\npp.readToken_lt_gt = function(code) { // '<>'\n let next = this.input.charCodeAt(this.pos + 1)\n let size = 1\n if (next === code) {\n size = code === 62 && this.input.charCodeAt(this.pos + 2) === 62 ? 3 : 2\n if (this.input.charCodeAt(this.pos + size) === 61) return this.finishOp(tt.assign, size + 1)\n return this.finishOp(tt.bitShift, size)\n }\n if (next === 33 && code === 60 && !this.inModule && this.input.charCodeAt(this.pos + 2) === 45 &&\n this.input.charCodeAt(this.pos + 3) === 45) {\n // ` + +#### `new ReadableStream([underlyingSource [, strategy]])` + + + +* `underlyingSource` {Object} + * `start` {Function} A user-defined function that is invoked immediately when + the `ReadableStream` is created. + * `controller` {ReadableStreamDefaultController|ReadableByteStreamController} + * Returns: `undefined` or a promise fulfilled with `undefined`. + * `pull` {Function} A user-defined function that is called repeatedly when the + `ReadableStream` internal queue is not full. The operation may be sync or + async. If async, the function will not be called again until the previously + returned promise is fulfilled. + * `controller` {ReadableStreamDefaultController|ReadableByteStreamController} + * Returns: A promise fulfilled with `undefined`. + * `cancel` {Function} A user-defined function that is called when the + `ReadableStream` is canceled. + * `reason` {any} + * Returns: A promise fulfilled with `undefined`. 
+ * `type` {string} Must be `'bytes'` or `undefined`. + * `autoAllocateChunkSize` {number} Used only when `type` is equal to + `'bytes'`. +* `strategy` {Object} + * `highWaterMark` {number} The maximum internal queue size before backpressure + is applied. + * `size` {Function} A user-defined function used to identify the size of each + chunk of data. + * `chunk` {any} + * Returns: {number} + + +#### `readableStream.locked` + + +* Type: {boolean} Set to `true` if there is an active reader for this + {ReadableStream}. + +The `readableStream.locked` property is `false` by default, and is +switch to `true` while there is an active reader consuming the +stream's data. + +#### `readableStream.cancel([reason])` + + +* `reason` {any} +* Returns: A promise fulfilled with `undefined` once cancelation has + been completed. + +#### `readableStream.getReader([options])` + + +* `options` {Object} + * `mode` {string} `'byob'` or `undefined` +* Returns: {ReadableStreamDefaultReader|ReadableStreamBYOBReader} + +```mjs +import { ReadableStream } from 'node:stream/web'; + +const stream = new ReadableStream(); + +const reader = stream.getReader(); + +console.log(await reader.read()); +``` + +```cjs +const { ReadableStream } = require('stream/web'); + +const stream = new ReadableStream(); + +const reader = stream.getReader(); + +reader.read().then(console.log); +``` + +Causes the `readableStream.locked` to be `true`. + +#### `readableStream.pipeThrough(transform[, options])` + + +* `transform` {Object} + * `readable` {ReadableStream} The `ReadableStream` to which + `transform.writable` will push the potentially modified data + is receives from this `ReadableStream`. + * `writable` {WritableStream} The `WritableStream` to which this + `ReadableStream`'s data will be written. +* `options` {Object} + * `preventAbort` {boolean} When `true`, errors in this `ReadableStream` + will not cause `transform.writable` to be aborted. 
+ * `preventCancel` {boolean} When `true`, errors in the destination + `transform.writable` is not cause this `ReadableStream` to be + canceled. + * `preventClose` {boolean} When `true`, closing this `ReadableStream` + will no cause `transform.writable` to be closed. + * `signal` {AbortSignal} Allows the transfer of data to be canceled + using an {AbortController}. +* Returns: {ReadableStream} From `transform.readable`. + +Connects this {ReadableStream} to the pair of {ReadableStream} and +{WritableStream} provided in the `transform` argument such that the +data from this {ReadableStream} is written in to `transform.writable`, +possibly transformed, then pushed to `transform.readable`. Once the +pipeline is configured, `transform.readable` is returned. + +Causes the `readableStream.locked` to be `true` while the pipe operation +is active. + +```mjs +import { + ReadableStream, + TransformStream, +} from 'node:stream/web'; + +const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + }, +}); + +const transform = new TransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } +}); + +const transformedStream = stream.pipeThrough(transform); + +for await (const chunk of transformedStream) + console.log(chunk); +``` + +```cjs +const { + ReadableStream, + TransformStream, +} = require('stream/web'); + +const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + }, +}); + +const transform = new TransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } +}); + +const transformedStream = stream.pipeThrough(transform); + +(async () => { + for await (const chunk of transformedStream) + console.log(chunk); +})(); +``` + +#### `readableStream.pipeTo(destination, options)` + + +* `destination` {WritableStream} A {WritableStream} to which this + `ReadableStream`'s data will be written. 
+* `options` {Object} + * `preventAbort` {boolean} When `true`, errors in this `ReadableStream` + will not cause `transform.writable` to be aborted. + * `preventCancel` {boolean} When `true`, errors in the destination + `transform.writable` is not cause this `ReadableStream` to be + canceled. + * `preventClose` {boolean} When `true`, closing this `ReadableStream` + will no cause `transform.writable` to be closed. + * `signal` {AbortSignal} Allows the transfer of data to be canceled + using an {AbortController}. +* Returns: A promise fulfilled with `undefined` + +Causes the `readableStream.locked` to be `true` while the pipe operation +is active. + +#### `readableStream.tee()` + + +* Returns: {ReadableStream[]} + +Returns a pair of new {ReadableStream} instances to which this +`ReadableStream`'s data will be forwarded. Each will receive the +same data. + +Causes the `readableStream.locked` to be `true`. + +#### `readableStream.values([options])` + + +* `options` {Object} + * `preventCancel` {boolean} When `true`, prevents the {ReadableStream} + from being closed when the async iterator abruptly terminates. + **Defaults**: `false` + +Creates and returns an async iterator usable for consuming this +`ReadableStream`'s data. + +Causes the `readableStream.locked` to be `true` while the async iterator +is active. + +```mjs +import { Buffer } from 'node:buffer'; + +const stream = new ReadableStream(getSomeSource()); + +for await (const chunk of stream.values({ preventCancel: true })) + console.log(Buffer.from(chunk).toString()); +``` + +#### Async Iteration + +The {ReadableStream} object supports the async iterator protocol using +`for await` syntax. + +```mjs +import { Buffer } from 'buffer'; + +const stream = new ReadableStream(getSomeSource()); + +for await (const chunk of stream) + console.log(Buffer.from(chunk).toString()); +``` + +The async iterator will consume the {ReadableStream} until it terminates. 
+ +By default, if the async iterator exits early (via either a `break`, +`return`, or a `throw`), the {ReadableStream} will be closed. To prevent +automatic closing of the {ReadableStream}, use the `readableStream.values()` +method to acquire the async iterator and set the `preventCancel` option to +`true`. + +The {ReadableStream} must not be locked (that is, it must not have an existing +active reader). During the async iteration, the {ReadableStream} will be locked. + +#### Transfering with `postMessage()` + +A {ReadableStream} instance can be transferred using a {MessagePort}. + +```js +const stream = new ReadableStream(getReadableSourceSomehow()); + +const { port1, port2 } = new MessageChannel(); + +port1.onmessage = ({ data }) => { + data.getReader().read().then((chunk) => { + console.log(chunk); + }); +}; + +port2.postMessage(stream, [stream]); +``` + +### Class: `ReadableStreamDefaultReader` + + +By default, calling `readableStream.getReader()` with no arguments +will return an instance of `ReadableStreamDefaultReader`. The default +reader treats the chunks of data passed through the stream as opaque +values, which allows the {ReadableStream} to work with generally any +JavaScript value. + +#### `new ReadableStreamDefaultReader(stream)` + + +* `stream` {ReadableStream} + +Creates a new {ReadableStreamDefaultReader} that is locked to the +given {ReadableStream}. + +#### `readableStreamDefaultReader.cancel([reason])` + + +* `reason` {any} +* Returns: A promise fulfilled with `undefined`. + +Cancels the {ReadableStream} and returns a promise that is fulfilled +when the underlying stream has been canceled. + +#### `readableStreamDefaultReader.closed` + + +* Type: {Promise} Fulfilled with `undefined` when the associated + {ReadableStream} is closed or this reader's lock is released. 
+ +#### `readableStreamDefaultReader.read()` + + +* Returns: A promise fulfilled with an object: + * `value` {ArrayBuffer} + * `done` {boolean} + +Requests the next chunk of data from the underlying {ReadableStream} +and returns a promise that is fulfilled with the data once it is +available. + +#### `readableStreamDefaultReader.releaseLock()` + + +Releases this reader's lock on the underlying {ReadableStream}. + +### Class: `ReadableStreamBYOBReader` + + +The `ReadableStreamBYOBReader` is an alternative consumer for +byte-oriented {ReadableStream}'s (those that are created with +`underlyingSource.type` set equal to `'bytes`` when the +`ReadableStream` was created). + +The `BYOB` is short for "bring your own buffer". This is a +pattern that allows for more efficient reading of byte-oriented +data that avoids extraneous copying. + +```mjs +import { + open +} from 'node:fs/promises'; + +import { + ReadableStream +} from 'node:stream/web'; + +import { Buffer } from 'node:buffer'; + +class Source { + type = 'bytes'; + autoAllocateChunkSize = 1024; + + async start(controller) { + this.file = await open(new URL(import.meta.url)); + this.controller = controller; + } + + async pull(controller) { + const view = controller.byobRequest?.view; + const { + bytesRead, + } = await this.file.read({ + buffer: view, + offset: view.byteOffset, + length: view.byteLength + }); + + if (bytesRead === 0) { + await this.file.close(); + this.controller.close(); + } + controller.byobRequest.respond(bytesRead); + } +} + +const stream = new ReadableStream(new Source()); + +async function read(stream) { + const reader = stream.getReader({ mode: 'byob' }); + + const chunks = []; + let result; + do { + result = await reader.read(Buffer.alloc(100)); + if (result.value !== undefined) + chunks.push(Buffer.from(result.value)); + } while (!result.done); + + return Buffer.concat(chunks); +} + +const data = await read(stream); +console.log(Buffer.from(data).toString()); +``` + +#### `new 
ReadableStreamBYOBReader(stream)` + + +* `stream` {ReadableStream} + +Creates a new `ReadableStreamBYOBReader` that is locked to the +given {ReadableStream}. + +#### `readableStreamBYOBReader.cancel([reason])` + + +* `reason` {any} +* Returns: A promise fulfilled with `undefined`. + +Cancels the {ReadableStream} and returns a promise that is fulfilled +when the underlying stream has been canceled. + +#### `readableStreamBYOBReader.closed` + + +* Type: {Promise} Fulfilled with `undefined` when the associated + {ReadableStream} is closed or this reader's lock is released. + +#### `readableStreamBYOBReader.read(view)` + + +* `view` {Buffer|TypedArray|DataView} +* Returns: A promise fulfilled with an object: + * `value` {ArrayBuffer} + * `done` {boolean} + +Requests the next chunk of data from the underlying {ReadableStream} +and returns a promise that is fulfilled with the data once it is +available. + +Do not pass a pooled {Buffer} object instance in to this method. +Pooled `Buffer` objects are created using `Buffer.allocUnsafe()`, +or `Buffer.from()`, or are often returned by various `fs` module +callbacks. These types of `Buffer`s use a shared underlying +{ArrayBuffer} object that contains all of the data from all of +the pooled `Buffer` instances. When a `Buffer`, {TypedArray}, +or {DataView} is passed in to `readableStreamBYOBReader.read()`, +the view's underlying `ArrayBuffer` is *detached*, invalidating +all existing views that may exist on that `ArrayBuffer`. This +can have disasterous consequences for your application. + +#### `readableStreamBYOBReader.releaseLock()` + + +Releases this reader's lock on the underlying {ReadableStream}. + +### Class: `ReadableStreamDefaultController` + + +Every {ReadableStream} has a controller that is responsible for +the internal state and management of the stream's queue. The +`ReadableStreamDefaultController` is the default controller +implementation for `ReadableStream`s that are not byte-oriented. 
+ +#### `readableStreamDefaultController.close()` + + +Closes the {ReadableStream} to which this controller is associated. + +#### `readableStreamDefaultController.desiredSize` + + +* Type: {number} + +Returns the amount of data remaining to fill the {ReadableStream}'s +queue. + +#### `readableStreamDefaultController.enqueue(chunk)` + + +* `chunk` {any} + +Appends a new chunk of data to the {ReadableStream}'s queue. + +#### `readableStreamDefaultController.error(error)` + + +* `error` {any} + +Signals an error that causes the {ReadableStream} to error and close. + +### Class: `ReadableByteStreamController` + + +Every {ReadableStream} has a controller that is responsible for +the internal state and management of the stream's queue. The +`ReadableByteStreamController` is for byte-oriented `ReadableStream`s. + +#### `readableByteStreamController.byobRequest` + + +* Type: {ReadableStreamBYOBRequest} + +#### `readableByteStreamController.close()` + + +Closes the {ReadableStream} to which this controller is associated. + +#### `readableByteStreamController.desiredSize` + + +* Type: {number} + +Returns the amount of data remaining to fill the {ReadableStream}'s +queue. + +#### `readableByteStreamController.enqueue(chunk)` + + +* `chunk`: {Buffer|TypedArray|DataView} + +Appends a new chunk of data to the {ReadableStream}'s queue. + +#### `readableByteStreamController.error(error)` + + +* `error` {any} + +Signals an error that causes the {ReadableStream} to error and close. + +### Class: `ReadableStreamBYOBRequest` + + +When using `ReadableByteStreamController` in byte-oriented +streams, and when using the `ReadableStreamBYOBReader`, +the `readableByteStreamController.byobRequest` property +provides access to a `ReadableStreamBYOBRequest` instance +that represents the current read request. 
The object +is used to gain access to the `ArrayBuffer`/`TypedArray` +that has been provided for the read request to fill, +and provides methods for signaling that the data has +been provided. + +#### `readableStreamBYOBRequest.respond(bytesWritten)` + + +* `bytesWritten` {number} + +Signals that a `bytesWritten` number of bytes have been written +to `readableStreamBYOBRequest.view`. + +#### `readableStreamBYOBRequest.respondWithNewView(view)` + + +* `view` {Buffer|TypedArray|DataView} + +Signals that the request has been fulfilled with bytes written +to a new `Buffer`, `TypedArray`, or `DataView`. + +#### `readableStreamBYOBRequest.view` + + +* Type: {Buffer|TypedArray|DataView} + +### Class: `WritableStream` + + +The `WritableStream` is a destination to which stream data is sent. + +```mjs +import { + WritableStream +} from 'node:stream/web'; + +const stream = new WritableStream({ + write(chunk) { + console.log(chunk); + } +}); + +await stream.getWriter().write('Hello World'); +``` + +#### `new WritableStream([underlyingSink[, strategy]])` + + +* `underlyingSink` {Object} + * `start` {Function} A user-defined function that is invoked immediately when + the `WritableStream` is created. + * `controller` {WritableStreamDefaultController} + * Returns: `undefined` or a promise fulfilled with `undefined`. + * `write` {Function} A user-defined function that is invoked when a chunk of + data has been written to the `WritableStream`. + * `chunk` {any} + * `controller` {WritableStreamDefaultController} + * Returns: A promise fulfilled with `undefined`. + * `close` {Function} A user-defined function that is called when the + `WritableStream` is closed. + * Returns: A promise fulfilled with `undefined`. + * `abort` {Function} A user-defined function that is called to abruptly close + the `WritableStream`. + * `reason` {any} + * Returns: A promise fulfilled with `undefined`. + * `type` {any} The `type` option is reserved for future use and *must* be + undefined. 
+* `strategy` {Object} + * `highWaterMark` {number} The maximum internal queue size before backpressure + is applied. + * `size` {Function} A user-defined function used to identify the size of each + chunk of data. + * `chunk` {any} + * Returns: {number} + +#### `writableStream.abort([reason])` + + +* `reason` {any} +* Returns: A promise fulfilled with `undefined`. + +Abruptly terminates the `WritableStream`. All queued writes will be +canceled with their associated promises rejected. + +#### `writableStream.close()` + + +* Returns: A promise fulfilled with `undefined`. + +Closes the `WritableStream` when no additional writes are expected. + +#### `writableStream.getWriter()` + + +* Returns: {WritableStreamDefaultWriter} + +Creates and creates a new writer instance that can be used to write +data into the `WritableStream`. + +#### `writableStream.locked` + + +* Type: {boolean} + +The `writableStream.locked` property is `false` by default, and is +switched to `true` while there is an active writer attached to this +`WritableStream`. + +#### Transfering with postMessage() + +A {WritableStream} instance can be transferred using a {MessagePort}. + +```js +const stream = new WritableStream(getWritableSinkSomehow()); + +const { port1, port2 } = new MessageChannel(); + +port1.onmessage = ({ data }) => { + data.getWriter().write('hello'); +}; + +port2.postMessage(stream, [stream]); +``` + +### Class: `WritableStreamDefaultWriter` + + +#### `new WritableStreamDefaultWriter(stream)` + + +* `stream` {WritableStream} + +Creates a new `WritableStreamDefaultWriter` that is locked to the given +`WritableStream`. + +#### `writableStreamDefaultWriter.abort([reason])` + + +* `reason` {any} +* Returns: A promise fulfilled with `undefined`. + +Abruptly terminates the `WritableStream`. All queued writes will be +canceled with their associated promises rejected. + +#### `writableStreamDefaultWriter.close()` + + +* Returns: A promise fulfilled with `undefined`. 
+ +Closes the `WritableStream` when no additional writes are expected. + +#### `writableStreamDefaultWriter.closed` + + +* Type: A promise that is fulfilled with `undefined` when the + associated {WritableStream} is closed or this writer's lock is + released. + +#### `writableStreamDefaultWriter.desiredSize` + + +* Type: {number} + +The amount of data required to fill the {WritableStream}'s queue. + +#### `writableStreamDefaultWriter.ready` + + +* type: A promise that is fulfilled with `undefined` when the + writer is ready to be used. + +#### `writableStreamDefaultWriter.releaseLock()` + + +Releases this writer's lock on the underlying {ReadableStream}. + +#### `writableStreamDefaultWriter.write([chunk])` + + +* `chunk`: {any} +* Returns: A promise fulfilled with `undefined`. + +Appends a new chunk of data to the {WritableStream}'s queue. + +### Class: `WritableStreamDefaultController` + + +The `WritableStreamDefaultController` manage's the {WritableStream}'s +internal state. + +#### `writableStreamDefaultController.abortReason` + +* Type: {any} The `reason` value passed to `writableStream.abort()`. + +#### `writableStreamDefaultController.error(error)` + + +* `error` {any} + +Called by user-code to signal that an error has occurred while processing +the `WritableStream` data. When called, the {WritableStream} will be aborted, +with currently pending writes canceled. + +#### `writableStreamDefaultController.signal` + +* Type: {AbortSignal} An `AbortSignal` that can be used to cancel pending + write or close operations when a {WritableStream} is aborted. + +### Class: `TransformStream` + + +A `TransformStream` consists of a {ReadableStream} and a {WritableStream} that +are connected such that the data written to the `WritableStream` is received, +and potentially transformed, before being pushed into the `ReadableStream`'s +queue. 
+ +```mjs +import { + TransformStream +} from 'node:stream/web'; + +const transform = new TransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } +}); + +await Promise.all([ + transform.writable.getWriter().write('A'), + transform.readable.getReader().read(), +]); +``` + +#### `new TransformStream([transformer[, writableStrategy[, readableStrategy]]])` + + +* `transformer` {Object} + * `start` {Function} A user-defined function that is invoked immediately when + the `TransformStream` is created. + * `controller` {TransformStreamDefaultController} + * Returns: `undefined` or a promise fulfilled with `undefined` + * `transform` {Function} A user-defined function that receives, and + potentially modifies, a chunk of data written to `transformStream.writable`, + before forwarding that on to `transformStream.readable`. + * `chunk` {any} + * `controller` {TransformStreamDefaultController} + * Returns: A promise fulfilled with `undefined`. + * `flush` {Function} A user-defined function that is called immediately before + the writable side of the `TransformStream` is closed, signaling the end of + the transformation process. + * `controller` {TransformStreamDefaultController} + * Returns: A promise fulfilled with `undefined`. + * `readableType` {any} the `readableType` option is reserved for future use + and *must* be `undefined. + * `writableType` {any} the `writableType` option is reserved for future use + and *must* be `undefined. +* `writableStrategy` {Object} + * `highWaterMark` {number} The maximum internal queue size before backpressure + is applied. + * `size` {Function} A user-defined function used to identify the size of each + chunk of data. + * `chunk` {any} + * Returns: {number} +* `readableStrategy` {Object} + * `highWaterMark` {number} The maximum internal queue size before backpressure + is applied. + * `size` {Function} A user-defined function used to identify the size of each + chunk of data. 
+    * `chunk` {any}
+    * Returns: {number}
+
+#### `transformStream.readable`
+
+
+* Type: {ReadableStream}
+
+#### `transformStream.writable`
+
+
+* Type: {WritableStream}
+
+#### Transferring with postMessage()
+
+A {TransformStream} instance can be transferred using a {MessagePort}.
+
+```js
+const stream = new TransformStream();
+
+const { port1, port2 } = new MessageChannel();
+
+port1.onmessage = ({ data }) => {
+  const { writable, readable } = data;
+  // ...
+};
+
+port2.postMessage(stream, [stream]);
+```
+
+### Class: `TransformStreamDefaultController`
+
+
+The `TransformStreamDefaultController` manages the internal state
+of the `TransformStream`.
+
+#### `transformStreamDefaultController.desiredSize`
+
+
+* Type: {number}
+
+The amount of data required to fill the readable side's queue.
+
+#### `transformStreamDefaultController.enqueue([chunk])`
+
+
+* `chunk` {any}
+
+Appends a chunk of data to the readable side's queue.
+
+#### `transformStreamDefaultController.error([reason])`
+
+
+* `reason` {any}
+
+Signals to both the readable and writable side that an error has occurred
+while processing the transform data, causing both sides to be abruptly
+closed.
+
+#### `transformStreamDefaultController.terminate()`
+
+
+Closes the readable side of the transport and causes the writable side
+to be abruptly closed with an error.
+ +### Class: `ByteLengthQueuingStrategy` + + +#### `new ByteLengthQueuingStrategy(options)` + + +* `options` {Object} + * `highWaterMark` {number} + +#### `byteLengthQueuingStrategy.highWaterMark` + + +* Type: {number} + +#### `byteLengthQueuingStrategy.size` + + +* Type: {Function} + * `chunk` {any} + * Returns: {number} + +### Class: `CountQueuingStrategy` + + +#### `new CountQueuingStrategy(options)` + + +* `options` {Object} + * `highWaterMark` {number} + +#### `countQueuingStrategy.highWaterMark` + + +* Type: {number} + +#### `countQueuingStrategy.size` + + +* Type: {Function} + * `chunk` {any} + * Returns: {number} + +[Streams]: stream.md +[WHATWG Streams Standard]: https://streams.spec.whatwg.org/ diff --git a/lib/internal/abort_controller.js b/lib/internal/abort_controller.js index 6c80aa7bf4f2b3..e6ee07052617d5 100644 --- a/lib/internal/abort_controller.js +++ b/lib/internal/abort_controller.js @@ -143,6 +143,7 @@ ObjectDefineProperty(AbortController.prototype, SymbolToStringTag, { }); module.exports = { + kAborted, AbortController, AbortSignal, }; diff --git a/lib/internal/errors.js b/lib/internal/errors.js index 13b56311d370b8..ed3fa3787e5eec 100644 --- a/lib/internal/errors.js +++ b/lib/internal/errors.js @@ -1033,6 +1033,7 @@ E('ERR_HTTP_SOCKET_ENCODING', 'Changing the socket encoding is not allowed per RFC7230 Section 3.', Error); E('ERR_HTTP_TRAILER_INVALID', 'Trailers are invalid with this transfer encoding', Error); +E('ERR_ILLEGAL_CONSTRUCTOR', 'Illegal constructor', TypeError); E('ERR_INCOMPATIBLE_OPTION_PAIR', 'Option "%s" cannot be used in combination with option "%s"', TypeError); E('ERR_INPUT_TYPE_NOT_ALLOWED', '--input-type can only be used with string ' + @@ -1256,8 +1257,8 @@ E('ERR_INVALID_RETURN_VALUE', (input, name, value) => { } return `Expected ${input} to be returned from the "${name}"` + ` function but got ${type}.`; -}, TypeError); -E('ERR_INVALID_STATE', 'Invalid state: %s', Error); +}, TypeError, RangeError); 
+E('ERR_INVALID_STATE', 'Invalid state: %s', Error, TypeError, RangeError); E('ERR_INVALID_SYNC_FORK_INPUT', 'Asynchronous forks do not support ' + 'Buffer, TypedArray, DataView or string input: %s', @@ -1361,7 +1362,7 @@ E('ERR_NO_CRYPTO', 'Node.js is not compiled with OpenSSL crypto support', Error); E('ERR_NO_ICU', '%s is not supported on Node.js compiled without ICU', TypeError); -E('ERR_OPERATION_FAILED', 'Operation failed: %s', Error); +E('ERR_OPERATION_FAILED', 'Operation failed: %s', Error, TypeError); E('ERR_OUT_OF_RANGE', (str, range, input, replaceDefaultBoolean = false) => { assert(range, 'Missing "range" argument'); diff --git a/lib/internal/per_context/primordials.js b/lib/internal/per_context/primordials.js index 42250ffb422d6e..4dfb4dea85ef2a 100644 --- a/lib/internal/per_context/primordials.js +++ b/lib/internal/per_context/primordials.js @@ -415,5 +415,10 @@ primordials.SafePromisePrototypeFinally = (thisPromise, onFinally) => .then(a, b) ); +primordials.AsyncIteratorPrototype = + primordials.ReflectGetPrototypeOf( + primordials.ReflectGetPrototypeOf( + async function* () {}).prototype); + ObjectSetPrototypeOf(primordials, null); ObjectFreeze(primordials); diff --git a/lib/internal/webstreams/queuingstrategies.js b/lib/internal/webstreams/queuingstrategies.js new file mode 100644 index 00000000000000..d8750665bd5e86 --- /dev/null +++ b/lib/internal/webstreams/queuingstrategies.js @@ -0,0 +1,168 @@ +'use strict'; + +const { + ObjectDefineProperties, + SymbolToStringTag, +} = primordials; + +const { + codes: { + ERR_INVALID_THIS, + ERR_MISSING_OPTION, + }, +} = require('internal/errors'); + +const { + customInspectSymbol: kInspect, +} = require('internal/util'); + +const { + customInspect, + isBrandCheck, + kType, + kState, +} = require('internal/webstreams/util'); + +const { + validateObject, +} = require('internal/validators'); + +const isByteLengthQueuingStrategy = + isBrandCheck('ByteLengthQueuingStrategy'); + +const isCountQueuingStrategy = + 
isBrandCheck('CountQueuingStrategy'); + +/** + * @callback QueuingStrategySize + * @param {any} chunk + * @returns {number} + * + * @typedef {{ + * highWaterMark : number, + * size? : QueuingStrategySize, + * }} QueuingStrategy + */ + +// eslint-disable-next-line func-name-matching,func-style +const byteSizeFunction = function size(chunk) { return chunk.byteLength; }; + +// eslint-disable-next-line func-name-matching,func-style +const countSizeFunction = function size() { return 1; }; + +/** + * @type {QueuingStrategy} + */ +class ByteLengthQueuingStrategy { + [kType] = 'ByteLengthQueuingStrategy'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {{ + * highWaterMark : number + * }} init + */ + constructor(init) { + validateObject(init, 'init'); + if (init.highWaterMark === undefined) + throw new ERR_MISSING_OPTION('options.highWaterMark'); + + // The highWaterMark value is not checked until the strategy + // is actually used, per the spec. + this[kState] = { + highWaterMark: +init.highWaterMark, + }; + } + + /** + * @readonly + * @type {number} + */ + get highWaterMark() { + if (!isByteLengthQueuingStrategy(this)) + throw new ERR_INVALID_THIS('ByteLengthQueuingStrategy'); + return this[kState].highWaterMark; + } + + /** + * @type {QueuingStrategySize} + */ + get size() { + if (!isByteLengthQueuingStrategy(this)) + throw new ERR_INVALID_THIS('ByteLengthQueuingStrategy'); + return byteSizeFunction; + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + highWaterMark: this.highWaterMark, + }); + } +} + +ObjectDefineProperties(ByteLengthQueuingStrategy.prototype, { + highWaterMark: { enumerable: true }, + size: { enumerable: true }, +}); + +/** + * @type {QueuingStrategy} + */ +class CountQueuingStrategy { + [kType] = 'CountQueuingStrategy'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {{ + * highWaterMark : number + * }} init + */ + constructor(init) { + validateObject(init, 
'init'); + if (init.highWaterMark === undefined) + throw new ERR_MISSING_OPTION('options.highWaterMark'); + + // The highWaterMark value is not checked until the strategy + // is actually used, per the spec. + this[kState] = { + highWaterMark: +init.highWaterMark, + }; + } + + /** + * @readonly + * @type {number} + */ + get highWaterMark() { + if (!isCountQueuingStrategy(this)) + throw new ERR_INVALID_THIS('CountQueuingStrategy'); + return this[kState].highWaterMark; + } + + /** + * @type {QueuingStrategySize} + */ + get size() { + if (!isCountQueuingStrategy(this)) + throw new ERR_INVALID_THIS('CountQueuingStrategy'); + return countSizeFunction; + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + highWaterMark: this.highWaterMark, + }); + } +} + +ObjectDefineProperties(CountQueuingStrategy.prototype, { + highWaterMark: { enumerable: true }, + size: { enumerable: true }, +}); + +module.exports = { + ByteLengthQueuingStrategy, + CountQueuingStrategy, +}; diff --git a/lib/internal/webstreams/readablestream.js b/lib/internal/webstreams/readablestream.js new file mode 100644 index 00000000000000..a8024c64af2353 --- /dev/null +++ b/lib/internal/webstreams/readablestream.js @@ -0,0 +1,2740 @@ +'use strict'; + +/* eslint-disable no-use-before-define */ + +const { + ArrayBuffer, + ArrayBufferPrototypeSlice, + ArrayPrototypePush, + ArrayPrototypeShift, + DataViewCtor, + FunctionPrototypeBind, + FunctionPrototypeCall, + MathMin, + NumberIsInteger, + ObjectCreate, + ObjectDefineProperties, + ObjectSetPrototypeOf, + Promise, + PromisePrototypeCatch, + PromisePrototypeThen, + PromiseResolve, + PromiseReject, + PromiseAll, + ReflectConstruct, + Symbol, + SymbolAsyncIterator, + SymbolToStringTag, + Uint8Array, +} = primordials; + +const { + codes: { + ERR_ILLEGAL_CONSTRUCTOR, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_STATE, + ERR_INVALID_THIS, + }, +} = require('internal/errors'); + +const { + DOMException, +} = 
internalBinding('messaging'); + +const { + isArrayBufferView, + isDataView, +} = require('util/types'); + +const { + createDeferredPromise, + customInspectSymbol: kInspect, +} = require('internal/util'); + +const { + serialize, + deserialize, +} = require('v8'); + +const { + validateObject, +} = require('internal/validators'); + +const { + kAborted, +} = require('internal/abort_controller'); + +const { + MessageChannel, +} = require('internal/worker/io'); + +const { + kDeserialize, + kTransfer, + kTransferList, + makeTransferable, +} = require('internal/worker/js_transferable'); + +const { + queueMicrotask, +} = require('internal/process/task_queues'); + +const { + ArrayBufferViewGetBuffer, + ArrayBufferViewGetByteLength, + ArrayBufferViewGetByteOffset, + ArrayBufferGetByteLength, + AsyncIterator, + copyArrayBuffer, + customInspect, + dequeueValue, + ensureIsPromise, + enqueueValueWithSize, + extractHighWaterMark, + extractSizeAlgorithm, + lazyTransfer, + isBrandCheck, + resetQueue, + setPromiseHandled, + transferArrayBuffer, + nonOpCancel, + nonOpPull, + nonOpStart, + kType, + kState, +} = require('internal/webstreams/util'); + +const { + WritableStreamDefaultWriter, + + isWritableStream, + isWritableStreamLocked, + isWritableStreamDefaultController, + isWritableStreamDefaultWriter, + + writableStreamAbort, + writableStreamCloseQueuedOrInFlight, + writableStreamDefaultWriterCloseWithErrorPropagation, + writableStreamDefaultWriterRelease, + writableStreamDefaultWriterWrite, +} = require('internal/webstreams/writablestream'); + +const assert = require('internal/assert'); + +const kCancel = Symbol('kCancel'); +const kClose = Symbol('kClose'); +const kChunk = Symbol('kChunk'); +const kError = Symbol('kError'); +const kPull = Symbol('kPull'); + +/** + * @typedef {import('../abort_controller').AbortSignal} AbortSignal + * @typedef {import('./queuingstrategies').QueuingStrategy} QueuingStrategy + * @typedef {import('./queuingstrategies').QueuingStrategySize + * } 
QueuingStrategySize + * @typedef {import('./writablestream').WritableStream} WritableStream + * + * @typedef {ReadableStreamDefaultController | ReadableByteStreamController + * } ReadableStreamController + * + * @typedef {ReadableStreamDefaultReader | ReadableStreamBYOBReader + * } ReadableStreamReader + * + * @callback UnderlyingSourceStartCallback + * @param {ReadableStreamController} controller + * @returns { any | Promise } + * + * @callback UnderlyingSourcePullCallback + * @param {ReadableStreamController} controller + * @returns { Promise } + * + * @callback UnderlyingSourceCancelCallback + * @param {any} reason + * @returns { Promise } + * + * @typedef {{ + * readable: ReadableStream, + * writable: WritableStream, + * }} ReadableWritablePair + * + * @typedef {{ + * preventClose? : boolean, + * preventAbort? : boolean, + * preventCancel? : boolean, + * signal? : AbortSignal, + * }} StreamPipeOptions + * + * @typedef {{ + * start? : UnderlyingSourceStartCallback, + * pull? : UnderlyingSourcePullCallback, + * cancel? : UnderlyingSourceCancelCallback, + * type? : "bytes", + * autoAllocateChunkSize? : number + * }} UnderlyingSource + * + */ + +class ReadableStream { + [kType] = 'ReadableStream'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {UnderlyingSource} [source] + * @param {QueuingStrategy} [strategy] + */ + constructor(source = {}, strategy = {}) { + if (source === null) + throw new ERR_INVALID_ARG_VALUE('source', 'Object', source); + this[kState] = { + disturbed: false, + state: 'readable', + storedError: undefined, + stream: undefined, + transfer: { + writable: undefined, + port1: undefined, + port2: undefined, + promise: undefined, + } + }; + // The spec requires handling of the strategy first + // here. Specifically, if getting the size and + // highWaterMark from the strategy fail, that has + // to trigger a throw before getting the details + // from the source. So be sure to keep these in + // this order. 
+ const size = strategy?.size; + const highWaterMark = strategy?.highWaterMark; + const type = source.type; + + if (`${type}` === 'bytes') { + if (size !== undefined) + throw new ERR_INVALID_ARG_VALUE.RangeError('strategy.size', size); + setupReadableByteStreamControllerFromSource( + this, + source, + extractHighWaterMark(highWaterMark, 0)); + return; + } + + if (type !== undefined) + throw new ERR_INVALID_ARG_VALUE('source.type', type); + setupReadableStreamDefaultControllerFromSource( + this, + source, + extractHighWaterMark(highWaterMark, 1), + extractSizeAlgorithm(size)); + + // eslint-disable-next-line no-constructor-return + return makeTransferable(this); + } + + /** + * @readonly + * @type {boolean} + */ + get locked() { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + return isReadableStreamLocked(this); + } + + /** + * @param {any} [reason] + * @returns { Promise } + */ + cancel(reason = undefined) { + if (!isReadableStream(this)) + return PromiseReject(new ERR_INVALID_THIS('ReadableStream')); + if (isReadableStreamLocked(this)) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('ReadableStream is locked')); + } + return readableStreamCancel(this, reason); + } + + /** + * @param {{ + * mode? 
: "byob" + * }} [options] + * @returns {ReadableStreamReader} + */ + getReader(options = {}) { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + validateObject(options, 'options', { nullable: true, allowFunction: true }); + const mode = options?.mode; + + if (mode === undefined) + return new ReadableStreamDefaultReader(this); + + if (`${mode}` !== 'byob') + throw new ERR_INVALID_ARG_VALUE('options.mode', mode); + return new ReadableStreamBYOBReader(this); + } + + /** + * @param {ReadableWritablePair} transform + * @param {StreamPipeOptions} [options] + * @returns {ReadableStream} + */ + pipeThrough(transform, options = {}) { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + const readable = transform?.readable; + if (!isReadableStream(readable)) { + throw new ERR_INVALID_ARG_TYPE( + 'transform.readable', + 'ReadableStream', + readable); + } + const writable = transform?.writable; + if (!isWritableStream(writable)) { + throw new ERR_INVALID_ARG_TYPE( + 'transform.writable', + 'WritableStream', + writable); + } + + // The web platform tests require that these be handled one at a + // time and in a specific order. options can be null or undefined. 
+ const preventAbort = options?.preventAbort; + const preventCancel = options?.preventCancel; + const preventClose = options?.preventClose; + const signal = options?.signal; + + if (signal !== undefined && signal?.[kAborted] === undefined) + throw new ERR_INVALID_ARG_TYPE('options.signal', 'AbortSignal', signal); + + if (isReadableStreamLocked(this)) + throw new ERR_INVALID_STATE.TypeError('The ReadableStream is locked'); + if (isWritableStreamLocked(writable)) + throw new ERR_INVALID_STATE.TypeError('The WritableStream is locked'); + + const promise = readableStreamPipeTo( + this, + writable, + !!preventClose, + !!preventAbort, + !!preventCancel, + signal); + setPromiseHandled(promise); + + return readable; + } + + /** + * @param {WritableStream} destination + * @param {StreamPipeOptions} [options] + * @returns {Promise} + */ + pipeTo(destination, options = {}) { + try { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + if (!isWritableStream(destination)) { + throw new ERR_INVALID_ARG_TYPE( + 'transform.writable', + 'WritableStream', + destination); + } + + const preventAbort = options?.preventAbort; + const preventCancel = options?.preventCancel; + const preventClose = options?.preventClose; + const signal = options?.signal; + + if (signal !== undefined && signal?.[kAborted] === undefined) + throw new ERR_INVALID_ARG_TYPE('options.signal', 'AbortSignal', signal); + + if (isReadableStreamLocked(this)) + throw new ERR_INVALID_STATE.TypeError('The ReadableStream is locked'); + if (isWritableStreamLocked(destination)) + throw new ERR_INVALID_STATE.TypeError('The WritableStream is locked'); + + return readableStreamPipeTo( + this, + destination, + !!preventClose, + !!preventAbort, + !!preventCancel, + signal); + } catch (error) { + return PromiseReject(error); + } + } + + /** + * @returns {ReadableStream[]} + */ + tee() { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + return readableStreamTee(this, 
false); + } + + /** + * @param {{ + * preventCancel? : boolean, + * }} [options] + * @returns {AsyncIterable} + */ + values(options = {}) { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + validateObject(options, 'options'); + const { + preventCancel = false, + } = options; + + const reader = new ReadableStreamDefaultReader(this); + let done = false; + let started = false; + let current; + + // The nextSteps function is not an async function in order + // to make it more efficient. Because nextSteps explicitly + // creates a Promise and returns it in the common case, + // making it an async function just causes two additional + // unnecessary Promise allocations to occur, which just add + // cost. + function nextSteps() { + if (done) + return PromiseResolve({ done: true, value: undefined }); + + if (reader[kState].stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'The reader is not bound to a ReadableStream')); + } + const promise = createDeferredPromise(); + + readableStreamDefaultReaderRead(reader, { + [kChunk](chunk) { + current = undefined; + promise.resolve({ value: chunk, done: false }); + }, + [kClose]() { + current = undefined; + done = true; + readableStreamReaderGenericRelease(reader); + promise.resolve({ done: true, value: undefined }); + }, + [kError](error) { + current = undefined; + done = true; + readableStreamReaderGenericRelease(reader); + promise.reject(error); + } + }); + return promise.promise; + } + + async function returnSteps(value) { + if (done) + return { done: true, value }; + done = true; + + if (reader[kState].stream === undefined) { + throw new ERR_INVALID_STATE.TypeError( + 'The reader is not bound to a ReadableStream'); + } + assert(!reader[kState].readRequests.length); + if (!preventCancel) { + const result = readableStreamReaderGenericCancel(reader, value); + readableStreamReaderGenericRelease(reader); + await result; + return { done: true, value }; + } + + 
      readableStreamReaderGenericRelease(reader);
+      return { done: true, value };
+    }
+
+    // TODO(@jasnell): Explore whether an async generator
+    // can be used here instead of a custom iterator object.
+    return ObjectSetPrototypeOf({
+      // Changing either of these functions (next or return)
+      // to async functions causes a failure in the streams
+      // Web Platform Tests that check for use of a modified
+      // Promise.prototype.then. Since the await keyword
+      // uses Promise.prototype.then, it is open to prototype
+      // pollution, which causes the test to fail. The other
+      // await uses here do not trigger that failure because
+      // the test that fails does not trigger those code paths.
+      next() {
+        // If this is the first read, delay by one microtask
+        // to ensure that the controller has had an opportunity
+        // to properly start and perform the initial pull.
+        // TODO(@jasnell): The spec doesn't call this out so
+        // need to investigate if it's a bug in our impl or
+        // the spec.
+        if (!started) {
+          current = PromiseResolve();
+          started = true;
+        }
+        current = current !== undefined ?
+          PromisePrototypeThen(current, nextSteps, nextSteps) :
+          nextSteps();
+        return current;
+      },
+
+      return(error) {
+        return current ?
+ PromisePrototypeThen( + current, + () => returnSteps(error), + () => returnSteps(error)) : + returnSteps(error); + }, + + [SymbolAsyncIterator]() { return this; } + }, AsyncIterator); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + locked: this.locked, + state: this[kState].state, + }); + } + + [kTransfer]() { + if (!isReadableStream(this)) + throw new ERR_INVALID_THIS('ReadableStream'); + if (this.locked) { + this[kState].transfer.port1?.close(); + this[kState].transfer.port1 = undefined; + this[kState].transfer.port2 = undefined; + throw new DOMException( + 'Cannot transfer a locked ReadableStream', + 'DataCloneError'); + } + + const { + writable, + promise, + } = lazyTransfer().newCrossRealmWritableSink( + this, + this[kState].transfer.port1); + + this[kState].transfer.writable = writable; + this[kState].transfer.promise = promise; + + return { + data: { port: this[kState].transfer.port2 }, + deserializeInfo: + 'internal/webstreams/readablestream:TransferedReadableStream' + }; + } + + [kTransferList]() { + const { port1, port2 } = new MessageChannel(); + this[kState].transfer.port1 = port1; + this[kState].transfer.port2 = port2; + return [ port2 ]; + } + + [kDeserialize]({ port }) { + const transfer = lazyTransfer(); + setupReadableStreamDefaultControllerFromSource( + this, + new transfer.CrossRealmTransformReadableSource(port), + 0, () => 1); + } +} + +ObjectDefineProperties(ReadableStream.prototype, { + [SymbolAsyncIterator]: { + configurable: true, + enumerable: false, + writable: true, + value: ReadableStream.prototype.values, + }, + locked: { enumerable: true }, + cancel: { enumerable: true }, + getReader: { enumerable: true }, + pipeThrough: { enumerable: true }, + pipeTo: { enumerable: true }, + tee: { enumerable: true }, +}); + +function TransferedReadableStream() { + return makeTransferable(ReflectConstruct( + function() { + this[kType] = 'ReadableStream'; + this[kState] = { + disturbed: false, + state: 
'readable', + storedError: undefined, + stream: undefined, + transfer: { + writable: undefined, + port: undefined, + promise: undefined, + } + }; + }, + [], ReadableStream)); +} +TransferedReadableStream.prototype[kDeserialize] = () => {}; + +class ReadableStreamBYOBRequest { + [kType] = 'ReadableStreamBYOBRequest'; + + get [SymbolToStringTag]() { return this[kType]; } + + constructor() { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + + /** + * @readonly + * @type {ArrayBufferView} + */ + get view() { + if (!isReadableStreamBYOBRequest(this)) + throw new ERR_INVALID_THIS('ReadableStreamBYOBRequest'); + return this[kState].view; + } + + /** + * @param {number} bytesWritten + */ + respond(bytesWritten) { + if (!isReadableStreamBYOBRequest(this)) + throw new ERR_INVALID_THIS('ReadableStreamBYOBRequest'); + const { + view, + controller, + } = this[kState]; + if (controller === undefined) { + throw new ERR_INVALID_STATE.TypeError( + 'This BYOB request has been invalidated'); + } + + const viewByteLength = ArrayBufferViewGetByteLength(view); + const viewBuffer = ArrayBufferViewGetBuffer(view); + const viewBufferByteLength = ArrayBufferGetByteLength(viewBuffer); + + if (viewByteLength === 0 || viewBufferByteLength === 0) { + throw new ERR_INVALID_STATE.TypeError( + 'View ArrayBuffer is zero-length or detached'); + } + + readableByteStreamControllerRespond(controller, bytesWritten); + } + + /** + * @param {ArrayBufferView} view + */ + respondWithNewView(view) { + if (!isReadableStreamBYOBRequest(this)) + throw new ERR_INVALID_THIS('ReadableStreamBYOBRequest'); + const { + controller, + } = this[kState]; + + if (controller === undefined) { + throw new ERR_INVALID_STATE.TypeError( + 'This BYOB request has been invalidated'); + } + + readableByteStreamControllerRespondWithNewView(controller, view); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + view: this.view, + controller: this[kState].controller, + }); + } +} + 
+ObjectDefineProperties(ReadableStreamBYOBRequest.prototype, { + view: { enumerable: true }, + respond: { enumerable: true }, + respondWithNewView: { enumerable: true }, +}); + +function createReadableStreamBYOBRequest(controller, view) { + return ReflectConstruct( + function() { + this[kType] = 'ReadableStreamBYOBRequest'; + this[kState] = { + controller, + view, + }; + }, + [], + ReadableStreamBYOBRequest + ); +} + +class DefaultReadRequest { + constructor() { + this[kState] = createDeferredPromise(); + } + + [kChunk](value) { + this[kState].resolve?.({ value, done: false }); + } + + [kClose]() { + this[kState].resolve?.({ value: undefined, done: true }); + } + + [kError](error) { + this[kState].reject?.(error); + } + + get promise() { return this[kState].promise; } +} + +class ReadIntoRequest { + constructor() { + this[kState] = createDeferredPromise(); + } + + [kChunk](value) { + this[kState].resolve?.({ value, done: false }); + } + + [kClose](value) { + this[kState].resolve?.({ value, done: true }); + } + + [kError](error) { + this[kState].reject?.(error); + } + + get promise() { return this[kState].promise; } +} + +class ReadableStreamDefaultReader { + [kType] = 'ReadableStreamDefaultReader'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {ReadableStream} stream + */ + constructor(stream) { + if (!isReadableStream(stream)) + throw new ERR_INVALID_ARG_TYPE('stream', 'ReadableStream', stream); + this[kState] = { + readRequests: [], + stream: undefined, + close: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + }; + setupReadableStreamDefaultReader(this, stream); + } + + /** + * @returns {Promise<{ + * value : any, + * done : boolean + * }>} + */ + read() { + if (!isReadableStreamDefaultReader(this)) + return PromiseReject(new ERR_INVALID_THIS('ReadableStreamDefaultReader')); + if (this[kState].stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'The reader is not attached to a 
stream')); + } + const readRequest = new DefaultReadRequest(); + readableStreamDefaultReaderRead(this, readRequest); + return readRequest.promise; + } + + releaseLock() { + if (!isReadableStreamDefaultReader(this)) + throw new ERR_INVALID_THIS('ReadableStreamDefaultReader'); + if (this[kState].stream === undefined) + return; + if (this[kState].readRequests.length) { + throw new ERR_INVALID_STATE.TypeError( + 'Cannot release with pending read requests'); + } + readableStreamReaderGenericRelease(this); + } + + /** + * @readonly + * @type {Promise} + */ + get closed() { + if (!isReadableStreamDefaultReader(this)) + return PromiseReject(new ERR_INVALID_THIS('ReadableStreamDefaultReader')); + return this[kState].close.promise; + } + + /** + * @param {any} reason + * @returns {Promise} + */ + cancel(reason = undefined) { + if (!isReadableStreamDefaultReader(this)) + return PromiseReject(new ERR_INVALID_THIS('ReadableStreamDefaultReader')); + if (this[kState].stream === undefined) { + return PromiseReject(new ERR_INVALID_STATE.TypeError( + 'The reader is not attached to a stream')); + } + return readableStreamReaderGenericCancel(this, reason); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + stream: this[kState].stream, + readRequests: this[kState].readRequests.length, + close: this[kState].close.promise, + }); + } +} + +ObjectDefineProperties(ReadableStreamDefaultReader.prototype, { + closed: { enumerable: true }, + read: { enumerable: true }, + releaseLock: { enumerable: true }, + cancel: { enumerable: true }, +}); + +class ReadableStreamBYOBReader { + [kType] = 'ReadableStreamBYOBReader'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {ReadableStream} stream + */ + constructor(stream) { + if (!isReadableStream(stream)) + throw new ERR_INVALID_ARG_TYPE('stream', 'ReadableStream', stream); + this[kState] = { + stream: undefined, + requestIntoRequests: [], + close: { + promise: undefined, + resolve: 
undefined, + reject: undefined, + }, + }; + setupReadableStreamBYOBReader(this, stream); + } + + /** + * @param {ArrayBufferView} view + * @returns {Promise<{ + * view : ArrayBufferView, + * done : boolean, + * }>} + */ + read(view) { + if (!isReadableStreamBYOBReader(this)) + return PromiseReject(new ERR_INVALID_THIS('ReadableStreamBYOBReader')); + if (!isArrayBufferView(view)) { + return PromiseReject( + new ERR_INVALID_ARG_TYPE( + 'view', + [ + 'Buffer', + 'TypedArray', + 'DataView', + ], + view)); + } + const viewByteLength = ArrayBufferViewGetByteLength(view); + const viewBuffer = ArrayBufferViewGetBuffer(view); + const viewBufferByteLength = ArrayBufferGetByteLength(viewBuffer); + + if (viewByteLength === 0 || viewBufferByteLength === 0) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'View ArrayBuffer is zero-length or detached')); + } + // Supposed to assert here that the view's buffer is not + // detached, but there's no API available to use to check that. + if (this[kState].stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'The reader is not attached to a stream')); + } + const readIntoRequest = new ReadIntoRequest(); + readableStreamBYOBReaderRead(this, view, readIntoRequest); + return readIntoRequest.promise; + } + + releaseLock() { + if (!isReadableStreamBYOBReader(this)) + throw new ERR_INVALID_THIS('ReadableStreamBYOBReader'); + if (this[kState].stream === undefined) + return; + if (this[kState].readIntoRequests.length) { + throw new ERR_INVALID_STATE.TypeError( + 'Cannot release with pending read requests'); + } + readableStreamReaderGenericRelease(this); + } + + /** + * @readonly + * @type {Promise} + */ + get closed() { + if (!isReadableStreamBYOBReader(this)) + return PromiseReject(new ERR_INVALID_THIS('ReadableStreamBYOBReader')); + return this[kState].close.promise; + } + + /** + * @param {any} reason + * @returns {Promise} + */ + cancel(reason = undefined) { + if 
(!isReadableStreamBYOBReader(this)) + return PromiseReject(new ERR_INVALID_THIS('ReadableStreamBYOBReader')); + if (this[kState].stream === undefined) { + return PromiseReject(new ERR_INVALID_STATE.TypeError( + 'The reader is not attached to a stream')); + } + return readableStreamReaderGenericCancel(this, reason); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + stream: this[kState].stream, + requestIntoRequests: this[kState].requestIntoRequests.length, + close: this[kState].close.promise, + }); + } +} + +ObjectDefineProperties(ReadableStreamBYOBReader.prototype, { + closed: { enumerable: true }, + read: { enumerable: true }, + releaseLock: { enumerable: true }, + cancel: { enumerable: true }, +}); + +class ReadableStreamDefaultController { + [kType] = 'ReadableStreamDefaultController'; + + get [SymbolToStringTag]() { return this[kType]; } + + constructor() { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + + /** + * @readonly + * @type {number} + */ + get desiredSize() { + return readableStreamDefaultControllerGetDesiredSize(this); + } + + close() { + if (!readableStreamDefaultControllerCanCloseOrEnqueue(this)) + throw new ERR_INVALID_STATE.TypeError('Controller is already closed'); + readableStreamDefaultControllerClose(this); + } + + /** + * @param {any} chunk + */ + enqueue(chunk = undefined) { + if (!readableStreamDefaultControllerCanCloseOrEnqueue(this)) + throw new ERR_INVALID_STATE.TypeError('Controller is already closed'); + readableStreamDefaultControllerEnqueue(this, chunk); + } + + /** + * @param {any} error + */ + error(error = undefined) { + readableStreamDefaultControllerError(this, error); + } + + [kCancel](reason) { + return readableStreamDefaultControllerCancelSteps(this, reason); + } + + [kPull](readRequest) { + readableStreamDefaultControllerPullSteps(this, readRequest); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { }); + } +} + 
+ObjectDefineProperties(ReadableStreamDefaultController.prototype, { + desiredSize: { enumerable: true }, + close: { enumerable: true }, + enqueue: { enumerable: true }, + error: { enumerable: true }, +}); + +function createReadableStreamDefaultController() { + return ReflectConstruct( + function() { + this[kType] = 'ReadableStreamDefaultController'; + this[kState] = {}; + }, + [], + ReadableStreamDefaultController, + ); +} + +class ReadableByteStreamController { + [kType] = 'ReadableByteStreamController'; + + get [SymbolToStringTag]() { return this[kType]; } + + constructor() { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + + /** + * @readonly + * @type {ReadableStreamBYOBRequest} + */ + get byobRequest() { + if (!isReadableByteStreamController(this)) + throw new ERR_INVALID_THIS('ReadableByteStreamController'); + if (this[kState].byobRequest === null && + this[kState].pendingPullIntos.length) { + const { + buffer, + byteOffset, + bytesFilled, + byteLength, + } = this[kState].pendingPullIntos[0]; + const view = + new Uint8Array( + buffer, + byteOffset + bytesFilled, + byteLength - bytesFilled); + this[kState].byobRequest = createReadableStreamBYOBRequest(this, view); + } + return this[kState].byobRequest; + } + + /** + * @readonly + * @type {number} + */ + get desiredSize() { + if (!isReadableByteStreamController(this)) + throw new ERR_INVALID_THIS('ReadableByteStreamController'); + return readableByteStreamControllerGetDesiredSize(this); + } + + close() { + if (!isReadableByteStreamController(this)) + throw new ERR_INVALID_THIS('ReadableByteStreamController'); + if (this[kState].closeRequested) + throw new ERR_INVALID_STATE.TypeError('Controller is already closed'); + if (this[kState].stream[kState].state !== 'readable') + throw new ERR_INVALID_STATE.TypeError('ReadableStream is already closed'); + readableByteStreamControllerClose(this); + } + + /** + * @param {ArrayBufferView} chunk + */ + enqueue(chunk) { + if (!isReadableByteStreamController(this)) + throw new 
ERR_INVALID_THIS('ReadableByteStreamController'); + if (!isArrayBufferView(chunk)) { + throw new ERR_INVALID_ARG_TYPE( + 'chunk', + [ + 'Buffer', + 'TypedArray', + 'DataView', + ], + chunk); + } + const chunkByteLength = ArrayBufferViewGetByteLength(chunk); + const chunkByteOffset = ArrayBufferViewGetByteOffset(chunk); + const chunkBuffer = ArrayBufferViewGetBuffer(chunk); + const chunkBufferByteLength = ArrayBufferGetByteLength(chunkBuffer); + if (chunkByteLength === 0 || chunkBufferByteLength === 0) { + throw new ERR_INVALID_STATE.TypeError( + 'chunk ArrayBuffer is zero-length or detached'); + } + if (this[kState].closeRequested) + throw new ERR_INVALID_STATE.TypeError('Controller is already closed'); + if (this[kState].stream[kState].state !== 'readable') + throw new ERR_INVALID_STATE.TypeError('ReadableStream is already closed'); + readableByteStreamControllerEnqueue( + this, + chunkBuffer, + chunkByteLength, + chunkByteOffset); + } + + /** + * @param {any} error + */ + error(error = undefined) { + if (!isReadableByteStreamController(this)) + throw new ERR_INVALID_THIS('ReadableByteStreamController'); + readableByteStreamControllerError(this, error); + } + + [kCancel](reason) { + return readableByteStreamControllerCancelSteps(this, reason); + } + + [kPull](readRequest) { + readableByteStreamControllerPullSteps(this, readRequest); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { }); + } +} + +ObjectDefineProperties(ReadableByteStreamController.prototype, { + byobRequest: { enumerable: true }, + desiredSize: { enumerable: true }, + close: { enumerable: true }, + enqueue: { enumerable: true }, + error: { enumerable: true }, +}); + +function createReadableByteStreamController() { + return ReflectConstruct( + function() { + this[kType] = 'ReadableByteStreamController'; + this[kState] = {}; + }, + [], + ReadableByteStreamController, + ); +} + +function createTeeReadableStream(start, pull, cancel) { + return ReflectConstruct( 
+ function() { + this[kType] = 'ReadableStream'; + this[kState] = { + disturbed: false, + state: 'readable', + storedError: undefined, + stream: undefined, + transfer: { + writable: undefined, + port: undefined, + promise: undefined, + } + }; + setupReadableStreamDefaultControllerFromSource( + this, + ObjectCreate(null, { + start: { value: start }, + pull: { value: pull }, + cancel: { value: cancel } + }), + 1, + () => 1); + return makeTransferable(this); + }, [], ReadableStream, + ); +} + +const isReadableStream = + isBrandCheck('ReadableStream'); +const isReadableByteStreamController = + isBrandCheck('ReadableByteStreamController'); +const isReadableStreamBYOBRequest = + isBrandCheck('ReadableStreamBYOBRequest'); +const isReadableStreamDefaultReader = + isBrandCheck('ReadableStreamDefaultReader'); +const isReadableStreamBYOBReader = + isBrandCheck('ReadableStreamBYOBReader'); + +// ---- ReadableStream Implementation + +function readableStreamPipeTo( + source, + dest, + preventClose, + preventAbort, + preventCancel, + signal) { + + let reader; + let writer; + // Both of these can throw synchronously. We want to capture + // the error and return a rejected promise instead. + try { + reader = new ReadableStreamDefaultReader(source); + writer = new WritableStreamDefaultWriter(dest); + } catch (error) { + return PromiseReject(error); + } + + source[kState].disturbed = true; + + let shuttingDown = false; + + if (signal !== undefined && signal?.[kAborted] === undefined) { + return PromiseReject( + new ERR_INVALID_ARG_TYPE( + 'options.signal', + 'AbortSignal', + signal)); + } + + const promise = createDeferredPromise(); + + let currentWrite = PromiseResolve(); + + // The error here can be undefined. The rejected arg + // tells us that the promise must be rejected even + // when error is undefine. 
+ function finalize(rejected, error) { + writableStreamDefaultWriterRelease(writer); + readableStreamReaderGenericRelease(reader); + if (signal !== undefined) + signal.removeEventListener('abort', abortAlgorithm); + if (rejected) + promise.reject(error); + else + promise.resolve(); + } + + async function waitForCurrentWrite() { + const write = currentWrite; + await write; + if (write !== currentWrite) + await waitForCurrentWrite(); + } + + function shutdownWithAnAction(action, rejected, originalError) { + if (shuttingDown) return; + shuttingDown = true; + if (dest[kState].state === 'writable' && + !writableStreamCloseQueuedOrInFlight(dest)) { + PromisePrototypeThen( + waitForCurrentWrite(), + complete, + (error) => finalize(true, error)); + return; + } + complete(); + + function complete() { + PromisePrototypeThen( + action(), + () => finalize(rejected, originalError), + (error) => finalize(true, error)); + } + } + + function shutdown(rejected, error) { + if (shuttingDown) return; + shuttingDown = true; + if (dest[kState].state === 'writable' && + !writableStreamCloseQueuedOrInFlight(dest)) { + PromisePrototypeThen( + waitForCurrentWrite(), + () => finalize(rejected, error), + (error) => finalize(true, error)); + return; + } + finalize(rejected, error); + } + + function abortAlgorithm() { + // Cannot use the AbortError class here. 
It must be a DOMException + const error = new DOMException('The operation was aborted', 'AbortError'); + const actions = []; + if (!preventAbort) { + ArrayPrototypePush( + actions, + () => { + if (dest[kState].state === 'writable') + return writableStreamAbort(dest, error); + return PromiseResolve(); + }); + } + if (!preventCancel) { + ArrayPrototypePush( + actions, + () => { + if (source[kState].state === 'readable') + return readableStreamCancel(source, error); + return PromiseResolve(); + }); + } + + shutdownWithAnAction( + async () => PromiseAll(actions.map((action) => action())), + true, + error); + } + + function watchErrored(stream, promise, action) { + if (stream[kState].state === 'errored') + action(stream[kState].storedError); + else + PromisePrototypeCatch(promise, action); + } + + function watchClosed(stream, promise, action) { + if (stream[kState].state === 'closed') + action(stream[kState].storedError); + else + PromisePrototypeThen(promise, action, () => {}); + } + + async function step() { + if (shuttingDown) + return true; + await writer[kState].ready.promise; + return new Promise((resolve, reject) => { + readableStreamDefaultReaderRead( + reader, + { + [kChunk](chunk) { + currentWrite = writableStreamDefaultWriterWrite(writer, chunk); + setPromiseHandled(currentWrite); + resolve(false); + }, + [kClose]: () => resolve(true), + [kError]: reject, + }); + }); + } + + async function run() { + // Run until step resolves as true + while (!await step()) {} + } + + if (signal !== undefined) { + if (signal.aborted) { + abortAlgorithm(); + return promise.promise; + } + signal.addEventListener('abort', abortAlgorithm, { once: true }); + } + + setPromiseHandled(run()); + + watchErrored(source, reader[kState].close.promise, (error) => { + if (!preventAbort) { + return shutdownWithAnAction( + () => writableStreamAbort(dest, error), + true, + error); + } + shutdown(true, error); + }); + + watchErrored(dest, writer[kState].close.promise, (error) => { + if 
(!preventCancel) { + return shutdownWithAnAction( + () => readableStreamCancel(source, error), + true, + error); + } + shutdown(true, error); + }); + + watchClosed(source, reader[kState].close.promise, () => { + if (!preventClose) { + return shutdownWithAnAction( + () => writableStreamDefaultWriterCloseWithErrorPropagation(writer)); + } + shutdown(); + }); + + if (writableStreamCloseQueuedOrInFlight(dest) || + dest[kState].state === 'closed') { + const error = new ERR_INVALID_STATE.TypeError( + 'Destination WritableStream is closed'); + if (!preventCancel) { + shutdownWithAnAction( + () => readableStreamCancel(source, error), true, error); + } else { + shutdown(true, error); + } + } + + return promise.promise; +} + +function readableStreamTee(stream, cloneForBranch2) { + const reader = new ReadableStreamDefaultReader(stream); + let reading = false; + let canceled1 = false; + let canceled2 = false; + let reason1; + let reason2; + let branch1; + let branch2; + const cancelPromise = createDeferredPromise(); + + async function pullAlgorithm() { + if (reading) return; + reading = true; + const readRequest = { + [kChunk](value) { + queueMicrotask(() => { + reading = false; + const value1 = value; + let value2 = value; + if (!canceled2 && cloneForBranch2) { + // Structured Clone + value2 = deserialize(serialize(value2)); + } + if (!canceled1) { + readableStreamDefaultControllerEnqueue( + branch1[kState].controller, + value1); + } + if (!canceled2) { + readableStreamDefaultControllerEnqueue( + branch2[kState].controller, + value2); + } + }); + }, + [kClose]() { + reading = false; + if (!canceled1) + readableStreamDefaultControllerClose(branch1[kState].controller); + if (!canceled2) + readableStreamDefaultControllerClose(branch2[kState].controller); + if (!canceled1 || !canceled2) + cancelPromise.resolve(); + }, + [kError]() { + reading = false; + }, + }; + readableStreamDefaultReaderRead(reader, readRequest); + } + + function cancel1Algorithm(reason) { + canceled1 = true; 
+ reason1 = reason; + if (canceled2) { + const compositeReason = [reason1, reason2]; + cancelPromise.resolve(readableStreamCancel(stream, compositeReason)); + } + return cancelPromise.promise; + } + + function cancel2Algorithm(reason) { + canceled2 = true; + reason2 = reason; + if (canceled1) { + const compositeReason = [reason1, reason2]; + cancelPromise.resolve(readableStreamCancel(stream, compositeReason)); + } + return cancelPromise.promise; + } + + branch1 = + createTeeReadableStream(nonOpStart, pullAlgorithm, cancel1Algorithm); + branch2 = + createTeeReadableStream(nonOpStart, pullAlgorithm, cancel2Algorithm); + + PromisePrototypeCatch( + reader[kState].close.promise, + (error) => { + readableStreamDefaultControllerError(branch1[kState].controller, error); + readableStreamDefaultControllerError(branch2[kState].controller, error); + if (!canceled1 || !canceled2) + cancelPromise.resolve(); + }); + + return [branch1, branch2]; +} + +function readableByteStreamControllerConvertPullIntoDescriptor(desc) { + const { + buffer, + bytesFilled, + byteLength, + byteOffset, + ctor, + elementSize, + } = desc; + if (bytesFilled > byteLength) + throw new ERR_INVALID_STATE.RangeError('The buffer size is invalid'); + assert(!(bytesFilled % elementSize)); + const transferedBuffer = transferArrayBuffer(buffer); + return new ctor(transferedBuffer, byteOffset, bytesFilled / elementSize); +} + +function isReadableStreamLocked(stream) { + return stream[kState].reader !== undefined; +} + +function readableStreamCancel(stream, reason) { + stream[kState].disturbed = true; + switch (stream[kState].state) { + case 'closed': + return PromiseResolve(); + case 'errored': + return PromiseReject(stream[kState].storedError); + } + readableStreamClose(stream); + const { + reader, + } = stream[kState]; + if (reader !== undefined && readableStreamHasBYOBReader(stream)) { + for (let n = 0; n < reader[kState].readIntoRequests.length; n++) + reader[kState].readIntoRequests[n][kClose](); + 
reader[kState].readIntoRequests = []; + } + + return PromisePrototypeThen( + ensureIsPromise( + stream[kState].controller[kCancel], + stream[kState].controller, + reason), + () => {}); +} + +function readableStreamClose(stream) { + assert(stream[kState].state === 'readable'); + stream[kState].state = 'closed'; + + const { + reader, + } = stream[kState]; + + if (reader === undefined) + return; + + reader[kState].close.resolve(); + + if (readableStreamHasDefaultReader(stream)) { + for (let n = 0; n < reader[kState].readRequests.length; n++) + reader[kState].readRequests[n][kClose](); + reader[kState].readRequests = []; + } +} + +function readableStreamError(stream, error) { + assert(stream[kState].state === 'readable'); + stream[kState].state = 'errored'; + stream[kState].storedError = error; + + const { + reader + } = stream[kState]; + + if (reader === undefined) + return; + + reader[kState].close.reject(error); + setPromiseHandled(reader[kState].close.promise); + + if (readableStreamHasDefaultReader(stream)) { + for (let n = 0; n < reader[kState].readRequests.length; n++) + reader[kState].readRequests[n][kError](error); + reader[kState].readRequests = []; + } else { + assert(readableStreamHasBYOBReader(stream)); + for (let n = 0; n < reader[kState].readIntoRequests.length; n++) + reader[kState].readIntoRequests[n][kError](error); + reader[kState].readIntoRequests = []; + } +} + +function readableStreamHasDefaultReader(stream) { + const { + reader, + } = stream[kState]; + + if (reader === undefined) + return false; + + return reader[kState] !== undefined && + reader[kType] === 'ReadableStreamDefaultReader'; +} + +function readableStreamGetNumReadRequests(stream) { + assert(readableStreamHasDefaultReader(stream)); + return stream[kState].reader[kState].readRequests.length; +} + +function readableStreamHasBYOBReader(stream) { + const { + reader, + } = stream[kState]; + + if (reader === undefined) + return false; + + return reader[kState] !== undefined && + 
reader[kType] === 'ReadableStreamBYOBReader'; +} + +function readableStreamGetNumReadIntoRequests(stream) { + assert(readableStreamHasBYOBReader(stream)); + return stream[kState].reader[kState].readIntoRequests.length; +} + +function readableStreamFulfillReadRequest(stream, chunk, done) { + assert(readableStreamHasDefaultReader(stream)); + const { + reader, + } = stream[kState]; + assert(reader[kState].readRequests.length); + const readRequest = ArrayPrototypeShift(reader[kState].readRequests); + + // TODO(@jasnell): It's not clear under what exact conditions done + // will be true here. The spec requires this check but none of the + // WPT's or other tests trigger it. Will need to investigate how to + // get coverage for this. + if (done) + readRequest[kClose](); + else + readRequest[kChunk](chunk); +} + +function readableStreamFulfillReadIntoRequest(stream, chunk, done) { + assert(readableStreamHasBYOBReader(stream)); + const { + reader, + } = stream[kState]; + assert(reader[kState].readIntoRequests.length); + const readIntoRequest = ArrayPrototypeShift(reader[kState].readIntoRequests); + if (done) + readIntoRequest[kClose](chunk); + else + readIntoRequest[kChunk](chunk); +} + +function readableStreamAddReadRequest(stream, readRequest) { + assert(readableStreamHasDefaultReader(stream)); + assert(stream[kState].state === 'readable'); + ArrayPrototypePush(stream[kState].reader[kState].readRequests, readRequest); +} + +function readableStreamAddReadIntoRequest(stream, readIntoRequest) { + assert(readableStreamHasBYOBReader(stream)); + assert(stream[kState].state !== 'errored'); + ArrayPrototypePush( + stream[kState].reader[kState].readIntoRequests, + readIntoRequest); +} + +function readableStreamReaderGenericCancel(reader, reason) { + const { + stream, + } = reader[kState]; + assert(stream !== undefined); + return readableStreamCancel(stream, reason); +} + +function readableStreamReaderGenericInitialize(reader, stream) { + reader[kState].stream = stream; + 
stream[kState].reader = reader; + switch (stream[kState].state) { + case 'readable': + reader[kState].close = createDeferredPromise(); + break; + case 'closed': + reader[kState].close = { + promise: PromiseResolve(), + resolve: undefined, + reject: undefined, + }; + break; + case 'errored': + reader[kState].close = { + promise: PromiseReject(stream[kState].storedError), + resolve: undefined, + reject: undefined, + }; + setPromiseHandled(reader[kState].close.promise); + break; + } +} + +function readableStreamReaderGenericRelease(reader) { + const { + stream, + } = reader[kState]; + assert(stream !== undefined); + assert(stream[kState].reader === reader); + + if (stream[kState].state === 'readable') { + reader[kState].close.reject?.( + new ERR_INVALID_STATE.TypeError('Reader released')); + } else { + reader[kState].close = { + promise: PromiseReject( + new ERR_INVALID_STATE.TypeError('Reader released')), + resolve: undefined, + reject: undefined, + }; + } + setPromiseHandled(reader[kState].close.promise); + stream[kState].reader = undefined; + reader[kState].stream = undefined; +} + +function readableStreamBYOBReaderRead(reader, view, readIntoRequest) { + const { + stream, + } = reader[kState]; + assert(stream !== undefined); + stream[kState].disturbed = true; + if (stream[kState].state === 'errored') { + readIntoRequest[kError](stream[kState].storedError); + return; + } + readableByteStreamControllerPullInto( + stream[kState].controller, + view, + readIntoRequest); +} + +function readableStreamDefaultReaderRead(reader, readRequest) { + const { + stream, + } = reader[kState]; + assert(stream !== undefined); + stream[kState].disturbed = true; + switch (stream[kState].state) { + case 'closed': + readRequest[kClose](); + break; + case 'errored': + readRequest[kError](stream[kState].storedError); + break; + case 'readable': + stream[kState].controller[kPull](readRequest); + } +} + +function setupReadableStreamBYOBReader(reader, stream) { + if 
(isReadableStreamLocked(stream)) + throw new ERR_INVALID_STATE.TypeError('ReadableStream is locked'); + const { + controller, + } = stream[kState]; + if (!isReadableByteStreamController(controller)) + throw new ERR_INVALID_ARG_VALUE('reader', reader, 'must be a byte stream'); + readableStreamReaderGenericInitialize(reader, stream); + reader[kState].readIntoRequests = []; +} + +function setupReadableStreamDefaultReader(reader, stream) { + if (isReadableStreamLocked(stream)) + throw new ERR_INVALID_STATE.TypeError('ReadableStream is locked'); + readableStreamReaderGenericInitialize(reader, stream); + reader[kState].readRequests = []; +} + +function readableStreamDefaultControllerClose(controller) { + if (!readableStreamDefaultControllerCanCloseOrEnqueue(controller)) + return; + controller[kState].closeRequested = true; + if (!controller[kState].queue.length) { + readableStreamDefaultControllerClearAlgorithms(controller); + readableStreamClose(controller[kState].stream); + } +} + +function readableStreamDefaultControllerEnqueue(controller, chunk) { + if (!readableStreamDefaultControllerCanCloseOrEnqueue(controller)) + return; + + const { + stream, + } = controller[kState]; + + if (isReadableStreamLocked(stream) && + readableStreamGetNumReadRequests(stream)) { + readableStreamFulfillReadRequest(stream, chunk, false); + } else { + try { + const chunkSize = + FunctionPrototypeCall( + controller[kState].sizeAlgorithm, + undefined, + chunk); + enqueueValueWithSize(controller, chunk, chunkSize); + } catch (error) { + readableStreamDefaultControllerError(controller, error); + throw error; + } + } + readableStreamDefaultControllerCallPullIfNeeded(controller); +} + +function readableStreamDefaultControllerHasBackpressure(controller) { + return !readableStreamDefaultControllerShouldCallPull(controller); +} + +function readableStreamDefaultControllerCanCloseOrEnqueue(controller) { + const { + stream, + } = controller[kState]; + return !controller[kState].closeRequested && + 
stream[kState].state === 'readable'; +} + +function readableStreamDefaultControllerGetDesiredSize(controller) { + const { + stream, + highWaterMark, + queueTotalSize, + } = controller[kState]; + switch (stream[kState].state) { + case 'errored': return null; + case 'closed': return 0; + default: + return highWaterMark - queueTotalSize; + } +} + +function readableStreamDefaultControllerShouldCallPull(controller) { + const { + stream, + } = controller[kState]; + if (!readableStreamDefaultControllerCanCloseOrEnqueue(controller) || + !controller[kState].started) + return false; + + if (isReadableStreamLocked(stream) && + readableStreamGetNumReadRequests(stream)) { + return true; + } + + const desiredSize = readableStreamDefaultControllerGetDesiredSize(controller); + assert(desiredSize !== null); + + return desiredSize > 0; +} + +function readableStreamDefaultControllerCallPullIfNeeded(controller) { + if (!readableStreamDefaultControllerShouldCallPull(controller)) + return; + if (controller[kState].pulling) { + controller[kState].pullAgain = true; + return; + } + assert(!controller[kState].pullAgain); + controller[kState].pulling = true; + PromisePrototypeThen( + ensureIsPromise(controller[kState].pullAlgorithm, controller), + () => { + controller[kState].pulling = false; + if (controller[kState].pullAgain) { + controller[kState].pullAgain = false; + readableStreamDefaultControllerCallPullIfNeeded(controller); + } + }, + (error) => readableStreamDefaultControllerError(controller, error)); +} + +function readableStreamDefaultControllerClearAlgorithms(controller) { + controller[kState].pullAlgorithm = undefined; + controller[kState].cancelAlgorithm = undefined; + controller[kState].sizeAlgorithm = undefined; +} + +function readableStreamDefaultControllerError(controller, error) { + const { + stream, + } = controller[kState]; + if (stream[kState].state === 'readable') { + resetQueue(controller); + readableStreamDefaultControllerClearAlgorithms(controller); + 
readableStreamError(stream, error); + } +} + +function readableStreamDefaultControllerCancelSteps(controller, reason) { + resetQueue(controller); + const result = controller[kState].cancelAlgorithm(reason); + readableStreamDefaultControllerClearAlgorithms(controller); + return result; +} + +function readableStreamDefaultControllerPullSteps(controller, readRequest) { + const { + stream, + queue, + } = controller[kState]; + if (queue.length) { + const chunk = dequeueValue(controller); + if (controller[kState].closeRequested && !queue.length) { + readableStreamDefaultControllerClearAlgorithms(controller); + readableStreamClose(stream); + } else { + readableStreamDefaultControllerCallPullIfNeeded(controller); + } + readRequest[kChunk](chunk); + return; + } + readableStreamAddReadRequest(stream, readRequest); + readableStreamDefaultControllerCallPullIfNeeded(controller); +} + +function setupReadableStreamDefaultController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + sizeAlgorithm) { + assert(stream[kState].controller === undefined); + controller[kState] = { + cancelAlgorithm, + closeRequested: false, + highWaterMark, + pullAgain: false, + pullAlgorithm, + pulling: false, + queue: [], + queueTotalSize: 0, + started: false, + sizeAlgorithm, + stream, + }; + stream[kState].controller = controller; + + const startResult = startAlgorithm(); + + PromisePrototypeThen( + PromiseResolve(startResult), + () => { + controller[kState].started = true; + assert(!controller[kState].pulling); + assert(!controller[kState].pullAgain); + readableStreamDefaultControllerCallPullIfNeeded(controller); + }, + (error) => readableStreamDefaultControllerError(controller, error)); +} + +function setupReadableStreamDefaultControllerFromSource( + stream, + source, + highWaterMark, + sizeAlgorithm) { + const controller = createReadableStreamDefaultController(); + const start = source?.start; + const pull = source?.pull; + const cancel = 
source?.cancel; + const startAlgorithm = start ? + FunctionPrototypeBind(start, source, controller) : + nonOpStart; + const pullAlgorithm = pull ? + FunctionPrototypeBind(pull, source, controller) : + nonOpPull; + + const cancelAlgorithm = cancel ? + FunctionPrototypeBind(cancel, source) : + nonOpCancel; + + setupReadableStreamDefaultController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + sizeAlgorithm); +} + +function readableByteStreamControllerClose(controller) { + const { + closeRequested, + pendingPullIntos, + queueTotalSize, + stream, + } = controller[kState]; + + if (closeRequested || stream[kState].state !== 'readable') + return; + + if (queueTotalSize) { + controller[kState].closeRequested = true; + return; + } + + if (pendingPullIntos.length) { + const firstPendingPullInto = pendingPullIntos[0]; + if (firstPendingPullInto.bytesFilled > 0) { + const error = new ERR_INVALID_STATE.TypeError('Partial read'); + readableByteStreamControllerError(controller, error); + throw error; + } + } + + readableByteStreamControllerClearAlgorithms(controller); + readableStreamClose(stream); +} + +function readableByteStreamControllerCommitPullIntoDescriptor(stream, desc) { + assert(stream[kState].state !== 'errored'); + let done = false; + if (stream[kState].state === 'closed') { + desc.bytesFilled = 0; + done = true; + } + + const filledView = + readableByteStreamControllerConvertPullIntoDescriptor(desc); + + if (desc.type === 'default') { + readableStreamFulfillReadRequest(stream, filledView, done); + } else { + assert(desc.type === 'byob'); + readableStreamFulfillReadIntoRequest(stream, filledView, done); + } +} + +function readableByteStreamControllerInvalidateBYOBRequest(controller) { + if (controller[kState].byobRequest === null) + return; + controller[kState].byobRequest[kState].controller = undefined; + controller[kState].byobRequest[kState].view = null; + controller[kState].byobRequest = null; +} + +function 
readableByteStreamControllerClearAlgorithms(controller) { + controller[kState].pullAlgorithm = undefined; + controller[kState].cancelAlgorithm = undefined; +} + +function readableByteStreamControllerClearPendingPullIntos(controller) { + readableByteStreamControllerInvalidateBYOBRequest(controller); + controller[kState].pendingPullIntos = []; +} + +function readableByteStreamControllerGetDesiredSize(controller) { + const { + stream, + highWaterMark, + queueTotalSize, + } = controller[kState]; + switch (stream[kState].state) { + case 'errored': return null; + case 'closed': return 0; + default: return highWaterMark - queueTotalSize; + } +} + +function readableByteStreamControllerShouldCallPull(controller) { + const { + stream, + } = controller[kState]; + if (stream[kState].state !== 'readable' || + controller[kState].closeRequested || + !controller[kState].started) { + return false; + } + if (readableStreamHasDefaultReader(stream) && + readableStreamGetNumReadRequests(stream) > 0) { + return true; + } + + if (readableStreamHasBYOBReader(stream) && + readableStreamGetNumReadIntoRequests(stream) > 0) { + return true; + } + + const desiredSize = readableByteStreamControllerGetDesiredSize(controller); + assert(desiredSize !== null); + + return desiredSize > 0; +} + +function readableByteStreamControllerHandleQueueDrain(controller) { + const { + closeRequested, + queueTotalSize, + stream, + } = controller[kState]; + assert(stream[kState].state === 'readable'); + if (!queueTotalSize && closeRequested) { + readableByteStreamControllerClearAlgorithms(controller); + readableStreamClose(stream); + return; + } + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function readableByteStreamControllerPullInto( + controller, + view, + readIntoRequest) { + const { + closeRequested, + stream, + pendingPullIntos, + } = controller[kState]; + let elementSize = 1; + let ctor = DataViewCtor; + if (isArrayBufferView(view) && !isDataView(view)) { + elementSize = 
view.constructor.BYTES_PER_ELEMENT; + ctor = view.constructor; + } + const buffer = ArrayBufferViewGetBuffer(view); + const byteOffset = ArrayBufferViewGetByteOffset(view); + const byteLength = ArrayBufferViewGetByteLength(view); + const bufferByteLength = ArrayBufferGetByteLength(buffer); + + let transferedBuffer; + try { + transferedBuffer = transferArrayBuffer(buffer); + } catch (error) { + readIntoRequest[kError](error); + return; + } + const desc = { + buffer: transferedBuffer, + bufferByteLength, + byteOffset, + byteLength, + bytesFilled: 0, + elementSize, + ctor, + type: 'byob', + }; + if (pendingPullIntos.length) { + ArrayPrototypePush(pendingPullIntos, desc); + readableStreamAddReadIntoRequest(stream, readIntoRequest); + return; + } + if (stream[kState].state === 'closed') { + const emptyView = new ctor(desc.buffer, byteOffset, 0); + readIntoRequest[kClose](emptyView); + return; + } + if (controller[kState].queueTotalSize) { + if (readableByteStreamControllerFillPullIntoDescriptorFromQueue( + controller, + desc)) { + const filledView = + readableByteStreamControllerConvertPullIntoDescriptor(desc); + readableByteStreamControllerHandleQueueDrain(controller); + readIntoRequest[kChunk](filledView); + return; + } + if (closeRequested) { + const error = new ERR_INVALID_STATE.TypeError('ReadableStream closed'); + readableByteStreamControllerError(controller, error); + readIntoRequest[kError](error); + return; + } + } + ArrayPrototypePush(pendingPullIntos, desc); + readableStreamAddReadIntoRequest(stream, readIntoRequest); + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function readableByteStreamControllerRespondInternal(controller, bytesWritten) { + const { + stream, + pendingPullIntos, + } = controller[kState]; + const desc = pendingPullIntos[0]; + readableByteStreamControllerInvalidateBYOBRequest(controller); + if (stream[kState].state === 'closed') { + if (bytesWritten) + throw new ERR_INVALID_STATE.TypeError( + 'Controller is closed but 
view is not zero-length'); + readableByteStreamControllerRespondInClosedState(controller, desc); + } else { + assert(stream[kState].state === 'readable'); + if (!bytesWritten) + throw new ERR_INVALID_STATE.TypeError('View cannot be zero-length'); + readableByteStreamControllerRespondInReadableState( + controller, + bytesWritten, + desc); + } + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function readableByteStreamControllerRespond(controller, bytesWritten) { + const { + pendingPullIntos, + stream, + } = controller[kState]; + assert(pendingPullIntos.length); + const desc = pendingPullIntos[0]; + + if (stream[kState].state === 'closed') { + if (bytesWritten !== 0) + throw new ERR_INVALID_ARG_VALUE('bytesWritten', bytesWritten); + } else { + assert(stream[kState].state === 'readable'); + + if (!bytesWritten) + throw new ERR_INVALID_ARG_VALUE('bytesWritten', bytesWritten); + + if ((desc.bytesFilled + bytesWritten) > desc.byteLength) + throw new ERR_INVALID_ARG_VALUE.RangeError('bytesWritten', bytesWritten); + } + + desc.buffer = transferArrayBuffer(desc.buffer); + + readableByteStreamControllerRespondInternal(controller, bytesWritten); +} + +function readableByteStreamControllerRespondInClosedState(controller, desc) { + assert(!desc.bytesFilled); + const { + stream, + } = controller[kState]; + if (readableStreamHasBYOBReader(stream)) { + while (readableStreamGetNumReadIntoRequests(stream) > 0) { + readableByteStreamControllerCommitPullIntoDescriptor( + stream, + readableByteStreamControllerShiftPendingPullInto(controller)); + } + } +} + +function readableByteStreamControllerFillHeadPullIntoDescriptor( + controller, + size, + desc) { + const { + pendingPullIntos, + byobRequest, + } = controller[kState]; + assert(!pendingPullIntos.length || pendingPullIntos[0] === desc); + assert(byobRequest === null); + desc.bytesFilled += size; +} + +function readableByteStreamControllerEnqueue( + controller, + buffer, + byteLength, + byteOffset) { + const { + 
closeRequested, + pendingPullIntos, + queue, + stream, + } = controller[kState]; + + if (closeRequested || stream[kState].state !== 'readable') + return; + + const transferedBuffer = transferArrayBuffer(buffer); + + if (pendingPullIntos.length) { + const firstPendingPullInto = pendingPullIntos[0]; + + const pendingBufferByteLength = + ArrayBufferGetByteLength(firstPendingPullInto.buffer); + if (pendingBufferByteLength === 0) { + throw new ERR_INVALID_STATE.TypeError( + 'Destination ArrayBuffer is zero-length or detached'); + } + + firstPendingPullInto.buffer = + transferArrayBuffer(firstPendingPullInto.buffer); + } + + readableByteStreamControllerInvalidateBYOBRequest(controller); + + if (readableStreamHasDefaultReader(stream)) { + if (!readableStreamGetNumReadRequests(stream)) { + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferedBuffer, + byteOffset, + byteLength); + } else { + assert(!queue.length); + const transferedView = + new Uint8Array(transferedBuffer, byteOffset, byteLength); + readableStreamFulfillReadRequest(stream, transferedView, false); + } + } else if (readableStreamHasBYOBReader(stream)) { + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferedBuffer, + byteOffset, + byteLength); + readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue( + controller); + } else { + assert(!isReadableStreamLocked(stream)); + readableByteStreamControllerEnqueueChunkToQueue( + controller, + transferedBuffer, + byteOffset, + byteLength); + } + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function readableByteStreamControllerEnqueueChunkToQueue( + controller, + buffer, + byteOffset, + byteLength) { + ArrayPrototypePush( + controller[kState].queue, + { + buffer, + byteOffset, + byteLength, + }); + controller[kState].queueTotalSize += byteLength; +} + +function readableByteStreamControllerFillPullIntoDescriptorFromQueue( + controller, + desc) { + const { + buffer, + byteLength, + byteOffset, + 
bytesFilled, + elementSize, + } = desc; + const currentAlignedBytes = bytesFilled - (bytesFilled % elementSize); + const maxBytesToCopy = MathMin( + controller[kState].queueTotalSize, + byteLength - bytesFilled); + const maxBytesFilled = bytesFilled + maxBytesToCopy; + const maxAlignedBytes = maxBytesFilled - (maxBytesFilled % elementSize); + let totalBytesToCopyRemaining = maxBytesToCopy; + let ready = false; + if (maxAlignedBytes > currentAlignedBytes) { + totalBytesToCopyRemaining = maxAlignedBytes - bytesFilled; + ready = true; + } + const { + queue, + } = controller[kState]; + + while (totalBytesToCopyRemaining) { + const headOfQueue = queue[0]; + const bytesToCopy = MathMin( + totalBytesToCopyRemaining, + headOfQueue.byteLength); + const destStart = byteOffset + desc.bytesFilled; + const arrayBufferByteLength = ArrayBufferGetByteLength(buffer); + if (arrayBufferByteLength - destStart < bytesToCopy) { + throw new ERR_INVALID_STATE.RangeError( + 'view ArrayBuffer size is invalid'); + } + assert(arrayBufferByteLength - destStart >= bytesToCopy); + copyArrayBuffer( + buffer, + destStart, + headOfQueue.buffer, + headOfQueue.byteOffset, + bytesToCopy); + if (headOfQueue.byteLength === bytesToCopy) { + ArrayPrototypeShift(queue); + } else { + headOfQueue.byteOffset += bytesToCopy; + headOfQueue.byteLength -= bytesToCopy; + } + controller[kState].queueTotalSize -= bytesToCopy; + readableByteStreamControllerFillHeadPullIntoDescriptor( + controller, + bytesToCopy, + desc); + totalBytesToCopyRemaining -= bytesToCopy; + } + + if (!ready) { + assert(!controller[kState].queueTotalSize); + assert(desc.bytesFilled > 0); + assert(desc.bytesFilled < elementSize); + } + return ready; +} + +function readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue( + controller) { + const { + closeRequested, + pendingPullIntos, + stream, + } = controller[kState]; + assert(!closeRequested); + while (pendingPullIntos.length) { + if (!controller[kState].queueTotalSize) + return; + 
const desc = pendingPullIntos[0]; + if (readableByteStreamControllerFillPullIntoDescriptorFromQueue( + controller, + desc)) { + readableByteStreamControllerShiftPendingPullInto(controller); + readableByteStreamControllerCommitPullIntoDescriptor(stream, desc); + } + } +} + +function readableByteStreamControllerRespondInReadableState( + controller, + bytesWritten, + desc) { + const { + buffer, + bytesFilled, + byteLength, + } = desc; + + if (bytesFilled + bytesWritten > byteLength) + throw new ERR_INVALID_STATE.RangeError('The buffer size is invalid'); + + readableByteStreamControllerFillHeadPullIntoDescriptor( + controller, + bytesWritten, + desc); + + if (desc.bytesFilled < desc.elementSize) + return; + + readableByteStreamControllerShiftPendingPullInto(controller); + + const remainderSize = desc.bytesFilled % desc.elementSize; + + if (remainderSize) { + const end = desc.byteOffset + desc.bytesFilled; + const start = end - remainderSize; + const remainder = + ArrayBufferPrototypeSlice( + buffer, + start, + end); + readableByteStreamControllerEnqueueChunkToQueue( + controller, + remainder, + 0, + ArrayBufferGetByteLength(remainder)); + } + desc.bytesFilled -= remainderSize; + readableByteStreamControllerCommitPullIntoDescriptor( + controller[kState].stream, + desc); + readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller); +} + +function readableByteStreamControllerRespondWithNewView(controller, view) { + const { + stream, + pendingPullIntos, + } = controller[kState]; + assert(pendingPullIntos.length); + + const desc = pendingPullIntos[0]; + assert(stream[kState].state !== 'errored'); + + if (!isArrayBufferView(view)) { + throw new ERR_INVALID_ARG_TYPE( + 'view', + [ + 'Buffer', + 'TypedArray', + 'DataView', + ], + view); + } + const viewByteLength = ArrayBufferViewGetByteLength(view); + const viewByteOffset = ArrayBufferViewGetByteOffset(view); + const viewBuffer = ArrayBufferViewGetBuffer(view); + const viewBufferByteLength = 
ArrayBufferGetByteLength(viewBuffer); + + const { + byteOffset, + byteLength, + bytesFilled, + bufferByteLength, + } = desc; + + if (byteOffset + bytesFilled !== viewByteOffset) + throw new ERR_INVALID_ARG_VALUE.RangeError('view', view); + + if (bytesFilled + viewByteOffset > byteLength) + throw new ERR_INVALID_ARG_VALUE.RangeError('view', view); + + if (bufferByteLength !== viewBufferByteLength) + throw new ERR_INVALID_ARG_VALUE.RangeError('view', view); + + desc.buffer = transferArrayBuffer(viewBuffer); + + readableByteStreamControllerRespondInternal(controller, viewByteLength); +} + +function readableByteStreamControllerShiftPendingPullInto(controller) { + assert(controller[kState].byobRequest === null); + return ArrayPrototypeShift(controller[kState].pendingPullIntos); +} + +function readableByteStreamControllerCallPullIfNeeded(controller) { + if (!readableByteStreamControllerShouldCallPull(controller)) + return; + if (controller[kState].pulling) { + controller[kState].pullAgain = true; + return; + } + assert(!controller[kState].pullAgain); + controller[kState].pulling = true; + PromisePrototypeThen( + ensureIsPromise(controller[kState].pullAlgorithm, controller), + () => { + controller[kState].pulling = false; + if (controller[kState].pullAgain) { + controller[kState].pullAgain = false; + readableByteStreamControllerCallPullIfNeeded(controller); + } + }, + (error) => readableByteStreamControllerError(controller, error)); +} + +function readableByteStreamControllerError(controller, error) { + const { + stream, + } = controller[kState]; + if (stream[kState].state !== 'readable') + return; + readableByteStreamControllerClearPendingPullIntos(controller); + resetQueue(controller); + readableByteStreamControllerClearAlgorithms(controller); + readableStreamError(stream, error); +} + +function readableByteStreamControllerCancelSteps(controller, reason) { + readableByteStreamControllerClearPendingPullIntos(controller); + resetQueue(controller); + const result = 
controller[kState].cancelAlgorithm(reason); + readableByteStreamControllerClearAlgorithms(controller); + return result; +} + +function readableByteStreamControllerPullSteps(controller, readRequest) { + const { + pendingPullIntos, + queue, + queueTotalSize, + stream, + } = controller[kState]; + assert(readableStreamHasDefaultReader(stream)); + if (queueTotalSize) { + assert(!readableStreamGetNumReadRequests(stream)); + const { + buffer, + byteOffset, + byteLength, + } = ArrayPrototypeShift(queue); + controller[kState].queueTotalSize -= byteLength; + readableByteStreamControllerHandleQueueDrain(controller); + const view = new Uint8Array(buffer, byteOffset, byteLength); + readRequest[kChunk](view); + return; + } + const { + autoAllocateChunkSize, + } = controller[kState]; + if (autoAllocateChunkSize !== undefined) { + try { + const buffer = new ArrayBuffer(autoAllocateChunkSize); + ArrayPrototypePush( + pendingPullIntos, + { + buffer, + byteOffset: 0, + byteLength: autoAllocateChunkSize, + bytesFilled: 0, + elementSize: 1, + ctor: Uint8Array, + type: 'default', + }); + } catch (error) { + readRequest[kError](error); + return; + } + } + + readableStreamAddReadRequest(stream, readRequest); + readableByteStreamControllerCallPullIfNeeded(controller); +} + +function setupReadableByteStreamController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + autoAllocateChunkSize) { + assert(stream[kState].controller === undefined); + if (autoAllocateChunkSize !== undefined) { + assert(NumberIsInteger(autoAllocateChunkSize)); + assert(autoAllocateChunkSize > 0); + } + controller[kState] = { + byobRequest: null, + closeRequested: false, + pullAgain: false, + pulling: false, + started: false, + stream, + queue: [], + queueTotalSize: 0, + highWaterMark, + pullAlgorithm, + cancelAlgorithm, + autoAllocateChunkSize, + pendingPullIntos: [], + }; + stream[kState].controller = controller; + + const startResult = startAlgorithm(); + + 
PromisePrototypeThen( + PromiseResolve(startResult), + () => { + controller[kState].started = true; + assert(!controller[kState].pulling); + assert(!controller[kState].pullAgain); + readableByteStreamControllerCallPullIfNeeded(controller); + }, + (error) => readableByteStreamControllerError(controller, error)); +} + +function setupReadableByteStreamControllerFromSource( + stream, + source, + highWaterMark) { + const controller = createReadableByteStreamController(); + const start = source?.start; + const pull = source?.pull; + const cancel = source?.cancel; + const autoAllocateChunkSize = source?.autoAllocateChunkSize; + const startAlgorithm = start ? + FunctionPrototypeBind(start, source, controller) : + nonOpStart; + const pullAlgorithm = pull ? + FunctionPrototypeBind(pull, source, controller) : + nonOpPull; + const cancelAlgorithm = cancel ? + FunctionPrototypeBind(cancel, source) : + nonOpCancel; + + if (autoAllocateChunkSize === 0) { + throw new ERR_INVALID_ARG_VALUE( + 'source.autoAllocateChunkSize', + autoAllocateChunkSize); + } + setupReadableByteStreamController( + stream, + controller, + startAlgorithm, + pullAlgorithm, + cancelAlgorithm, + highWaterMark, + autoAllocateChunkSize); +} + +module.exports = { + ReadableStream, + ReadableStreamDefaultReader, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + ReadableByteStreamController, + ReadableStreamDefaultController, + TransferedReadableStream, + + // Exported Brand Checks + isReadableStream, + isReadableByteStreamController, + isReadableStreamBYOBRequest, + isReadableStreamDefaultReader, + isReadableStreamBYOBReader, + isWritableStreamDefaultWriter, + isWritableStreamDefaultController, + + readableStreamPipeTo, + readableStreamTee, + readableByteStreamControllerConvertPullIntoDescriptor, + isReadableStreamLocked, + readableStreamCancel, + readableStreamClose, + readableStreamError, + readableStreamHasDefaultReader, + readableStreamGetNumReadRequests, + readableStreamHasBYOBReader, + 
readableStreamGetNumReadIntoRequests, + readableStreamFulfillReadRequest, + readableStreamFulfillReadIntoRequest, + readableStreamAddReadRequest, + readableStreamAddReadIntoRequest, + readableStreamReaderGenericCancel, + readableStreamReaderGenericInitialize, + readableStreamReaderGenericRelease, + readableStreamBYOBReaderRead, + readableStreamDefaultReaderRead, + setupReadableStreamBYOBReader, + setupReadableStreamDefaultReader, + readableStreamDefaultControllerClose, + readableStreamDefaultControllerEnqueue, + readableStreamDefaultControllerHasBackpressure, + readableStreamDefaultControllerCanCloseOrEnqueue, + readableStreamDefaultControllerGetDesiredSize, + readableStreamDefaultControllerShouldCallPull, + readableStreamDefaultControllerCallPullIfNeeded, + readableStreamDefaultControllerClearAlgorithms, + readableStreamDefaultControllerError, + readableStreamDefaultControllerCancelSteps, + readableStreamDefaultControllerPullSteps, + setupReadableStreamDefaultController, + setupReadableStreamDefaultControllerFromSource, + readableByteStreamControllerClose, + readableByteStreamControllerCommitPullIntoDescriptor, + readableByteStreamControllerInvalidateBYOBRequest, + readableByteStreamControllerClearAlgorithms, + readableByteStreamControllerClearPendingPullIntos, + readableByteStreamControllerGetDesiredSize, + readableByteStreamControllerShouldCallPull, + readableByteStreamControllerHandleQueueDrain, + readableByteStreamControllerPullInto, + readableByteStreamControllerRespondInternal, + readableByteStreamControllerRespond, + readableByteStreamControllerRespondInClosedState, + readableByteStreamControllerFillHeadPullIntoDescriptor, + readableByteStreamControllerEnqueue, + readableByteStreamControllerEnqueueChunkToQueue, + readableByteStreamControllerFillPullIntoDescriptorFromQueue, + readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue, + readableByteStreamControllerRespondInReadableState, + readableByteStreamControllerRespondWithNewView, + 
readableByteStreamControllerShiftPendingPullInto, + readableByteStreamControllerCallPullIfNeeded, + readableByteStreamControllerError, + readableByteStreamControllerCancelSteps, + readableByteStreamControllerPullSteps, + setupReadableByteStreamController, + setupReadableByteStreamControllerFromSource, +}; + +/* eslint-enable no-use-before-define */ diff --git a/lib/internal/webstreams/transfer.js b/lib/internal/webstreams/transfer.js new file mode 100644 index 00000000000000..72cdc36a153564 --- /dev/null +++ b/lib/internal/webstreams/transfer.js @@ -0,0 +1,299 @@ +'use strict'; + +const { + ObjectDefineProperties, + PromiseResolve, + ReflectConstruct, +} = primordials; + +const { + kState, + setPromiseHandled, +} = require('internal/webstreams/util'); + +const { + DOMException, +} = internalBinding('messaging'); + +const { + ReadableStream, + readableStreamDefaultControllerEnqueue, + readableStreamDefaultControllerClose, + readableStreamDefaultControllerError, + readableStreamPipeTo, +} = require('internal/webstreams/readablestream'); + +const { + WritableStream, + writableStreamDefaultControllerErrorIfNeeded, +} = require('internal/webstreams/writablestream'); + +const { + createDeferredPromise, +} = require('internal/util'); + +const assert = require('internal/assert'); + +const { + makeTransferable, + kClone, + kDeserialize, +} = require('internal/worker/js_transferable'); + +// This class is a bit of a hack. The Node.js implementation of +// DOMException is not transferable/cloneable. This provides us +// with a variant that is. Unfortunately, it means playing around +// a bit with the message, name, and code properties and the +// prototype. We can revisit this if DOMException is ever made +// properly cloneable. 
+class CloneableDOMException extends DOMException { + constructor(message, name) { + super(message, name); + this[kDeserialize]({ + message: this.message, + name: this.name, + code: this.code, + }); + // eslint-disable-next-line no-constructor-return + return makeTransferable(this); + } + + [kClone]() { + return { + data: { + message: this.message, + name: this.name, + code: this.code, + }, + deserializeInfo: + 'internal/webstreams/transfer:InternalCloneableDOMException' + }; + } + + [kDeserialize]({ message, name, code }) { + ObjectDefineProperties(this, { + message: { + configurable: true, + enumerable: true, + get() { return message; }, + }, + name: { + configurable: true, + enumerable: true, + get() { return name; }, + }, + code: { + configurable: true, + enumerable: true, + get() { return code; }, + }, + }); + } +} + +function InternalCloneableDOMException() { + return makeTransferable( + ReflectConstruct( + CloneableDOMException, + [], + DOMException)); +} +InternalCloneableDOMException[kDeserialize] = () => {}; + +class CrossRealmTransformReadableSource { + constructor(port) { + this[kState] = { + port, + controller: undefined, + }; + + port.onmessage = ({ data }) => { + const { + controller, + } = this[kState]; + const { + type, + value, + } = data; + switch (type) { + case 'chunk': + readableStreamDefaultControllerEnqueue( + controller, + value); + break; + case 'close': + readableStreamDefaultControllerClose(controller); + port.close(); + break; + case 'error': + readableStreamDefaultControllerError(controller, value); + port.close(); + break; + } + }; + + port.onmessageerror = () => { + const error = new CloneableDOMException( + 'Internal transfered ReadableStream error', + 'DataCloneError'); + port.postMessage({ type: 'error', value: error }); + readableStreamDefaultControllerError( + this[kState].controller, + error); + port.close(); + }; + } + + start(controller) { + this[kState].controller = controller; + } + + async pull() { + 
this[kState].port.postMessage({ type: 'pull' }); + } + + async cancel(reason) { + try { + this[kState].port.postMessage({ type: 'error', value: reason }); + } catch (error) { + if (error instanceof DOMException) { + // eslint-disable-next-line no-ex-assign + error = new CloneableDOMException(error.message, error.name); + } + this[kState].port.postMessage({ type: 'error', value: error }); + throw error; + } finally { + this[kState].port.close(); + } + } +} + +class CrossRealmTransformWritableSink { + constructor(port) { + this[kState] = { + port, + controller: undefined, + backpressurePromise: createDeferredPromise(), + }; + + port.onmessage = ({ data }) => { + assert(typeof data === 'object'); + const { + type, + value + } = { ...data }; + assert(typeof type === 'string'); + switch (type) { + case 'pull': + if (this[kState].backpressurePromise !== undefined) + this[kState].backpressurePromise.resolve?.(); + this[kState].backpressurePromise = undefined; + break; + case 'error': + writableStreamDefaultControllerErrorIfNeeded( + this[kState].controller, + value); + if (this[kState].backpressurePromise !== undefined) + this[kState].backpressurePromise.resolve?.(); + this[kState].backpressurePromise = undefined; + break; + } + }; + port.onmessageerror = () => { + const error = new CloneableDOMException( + 'Internal transfered ReadableStream error', + 'DataCloneError'); + port.postMessage({ type: 'error', value: error }); + writableStreamDefaultControllerErrorIfNeeded( + this[kState].controller, + error); + port.close(); + }; + + } + + start(controller) { + this[kState].controller = controller; + } + + async write(chunk) { + if (this[kState].backpressurePromise === undefined) { + this[kState].backpressurePromise = { + promise: PromiseResolve(), + resolve: undefined, + reject: undefined, + }; + } + await this[kState].backpressurePromise.promise; + this[kState].backpressurePromise = createDeferredPromise(); + try { + this[kState].port.postMessage({ type: 'chunk', value: 
chunk }); + } catch (error) { + if (error instanceof DOMException) { + // eslint-disable-next-line no-ex-assign + error = new CloneableDOMException(error.message, error.name); + } + this[kState].port.postMessage({ type: 'error', value: error }); + this[kState].port.close(); + throw error; + } + } + + close() { + this[kState].port.postMessage({ type: 'close' }); + this[kState].port.close(); + } + + abort(reason) { + try { + this[kState].port.postMessage({ type: 'error', value: reason }); + } catch (error) { + if (error instanceof DOMException) { + // eslint-disable-next-line no-ex-assign + error = new CloneableDOMException(error.message, error.name); + } + this[kState].port.postMessage({ type: 'error', value: error }); + throw error; + } finally { + this[kState].port.close(); + } + } +} + +function newCrossRealmReadableStream(writable, port) { + const readable = + new ReadableStream( + new CrossRealmTransformReadableSource(port)); + + const promise = + readableStreamPipeTo(readable, writable, false, false, false); + + setPromiseHandled(promise); + + return { + readable, + promise, + }; +} + +function newCrossRealmWritableSink(readable, port) { + const writable = + new WritableStream( + new CrossRealmTransformWritableSink(port)); + + const promise = readableStreamPipeTo(readable, writable, false, false, false); + setPromiseHandled(promise); + return { + writable, + promise, + }; +} + +module.exports = { + newCrossRealmReadableStream, + newCrossRealmWritableSink, + CrossRealmTransformWritableSink, + CrossRealmTransformReadableSource, + CloneableDOMException, + InternalCloneableDOMException, +}; diff --git a/lib/internal/webstreams/transformstream.js b/lib/internal/webstreams/transformstream.js new file mode 100644 index 00000000000000..745675266f7f1b --- /dev/null +++ b/lib/internal/webstreams/transformstream.js @@ -0,0 +1,591 @@ +'use strict'; + +const { + FunctionPrototypeBind, + FunctionPrototypeCall, + ObjectDefineProperties, + PromisePrototypeCatch, + 
PromisePrototypeThen, + PromiseResolve, + ReflectConstruct, + SymbolToStringTag, +} = primordials; + +const { + codes: { + ERR_ILLEGAL_CONSTRUCTOR, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_STATE, + ERR_INVALID_THIS, + }, +} = require('internal/errors'); + +const { + DOMException, +} = internalBinding('messaging'); + +const { + createDeferredPromise, + customInspectSymbol: kInspect, +} = require('internal/util'); + +const { + kDeserialize, + kTransfer, + kTransferList, + makeTransferable, +} = require('internal/worker/js_transferable'); + +const { + customInspect, + ensureIsPromise, + extractHighWaterMark, + extractSizeAlgorithm, + isBrandCheck, + nonOpFlush, + kType, + kState, +} = require('internal/webstreams/util'); + +const { + ReadableStream, + readableStreamDefaultControllerCanCloseOrEnqueue, + readableStreamDefaultControllerClose, + readableStreamDefaultControllerEnqueue, + readableStreamDefaultControllerError, + readableStreamDefaultControllerGetDesiredSize, + readableStreamDefaultControllerHasBackpressure, +} = require('internal/webstreams/readablestream'); + +const { + WritableStream, + writableStreamDefaultControllerErrorIfNeeded, +} = require('internal/webstreams/writablestream'); + +const assert = require('internal/assert'); + +/** + * @typedef {import('./queuingstrategies').QueuingStrategy + * } QueuingStrategy + * @typedef {import('./queuingstrategies').QueuingStrategySize + * } QueuingStrategySize + * + * @callback TransformerStartCallback + * @param {TransformStreamDefaultController} controller; + * + * @callback TransformerFlushCallback + * @param {TransformStreamDefaultController} controller; + * @returns {Promise} + * + * @callback TransformerTransformCallback + * @param {any} chunk + * @param {TransformStreamDefaultController} controller + * @returns {Promise} + * + * @typedef {{ + * start? : TransformerStartCallback, + * transform? : TransformerTransformCallback, + * flush? : TransformerFlushCallback, + * readableType? : any, + * writableType? 
: any, + * }} Transformer + */ + +class TransformStream { + [kType] = 'TransformStream'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {Transformer} [transformer] + * @param {QueuingStrategy} [writableStrategy] + * @param {QueuingStrategy} [readableStrategy] + */ + constructor( + transformer = null, + writableStrategy = {}, + readableStrategy = {}) { + const readableType = transformer?.readableType; + const writableType = transformer?.writableType; + const start = transformer?.start; + + if (readableType !== undefined) { + throw new ERR_INVALID_ARG_VALUE.RangeError( + 'transformer.readableType', + readableType); + } + if (writableType !== undefined) { + throw new ERR_INVALID_ARG_VALUE.RangeError( + 'transformer.writableType', + writableType); + } + + const readableHighWaterMark = readableStrategy?.highWaterMark; + const readableSize = readableStrategy?.size; + + const writableHighWaterMark = writableStrategy?.highWaterMark; + const writableSize = writableStrategy?.size; + + const actualReadableHighWaterMark = + extractHighWaterMark(readableHighWaterMark, 0); + const actualReadableSize = extractSizeAlgorithm(readableSize); + + const actualWritableHighWaterMark = + extractHighWaterMark(writableHighWaterMark, 1); + const actualWritableSize = extractSizeAlgorithm(writableSize); + + const startPromise = createDeferredPromise(); + + initializeTransformStream( + this, + startPromise, + actualWritableHighWaterMark, + actualWritableSize, + actualReadableHighWaterMark, + actualReadableSize); + + setupTransformStreamDefaultControllerFromTransformer(this, transformer); + + if (start !== undefined) { + startPromise.resolve( + FunctionPrototypeCall( + start, + transformer, + this[kState].controller)); + } else { + startPromise.resolve(); + } + + // eslint-disable-next-line no-constructor-return + return makeTransferable(this); + } + + /** + * @readonly + * @type {ReadableStream} + */ + get readable() { + if (!isTransformStream(this)) + throw new 
ERR_INVALID_THIS('TransformStream'); + return this[kState].readable; + } + + /** + * @readonly + * @type {WritableStream} + */ + get writable() { + if (!isTransformStream(this)) + throw new ERR_INVALID_THIS('TransformStream'); + return this[kState].writable; + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + readable: this.readable, + writable: this.writable, + backpressure: this[kState].backpressure, + }); + } + + [kTransfer]() { + if (!isTransformStream(this)) + throw new ERR_INVALID_THIS('TransformStream'); + const { + readable, + writable, + } = this[kState]; + if (readable.locked) { + throw new DOMException( + 'Cannot transfer a locked ReadableStream', + 'DataCloneError'); + } + if (writable.locked) { + throw new DOMException( + 'Cannot transfer a locked WritableStream', + 'DataCloneError'); + } + return { + data: { + readable, + writable, + }, + deserializeInfo: + 'internal/webstreams/transformstream:TransferedTransformStream' + }; + } + + [kTransferList]() { + return [ this[kState].readable, this[kState].writable ]; + } + + [kDeserialize]({ readable, writable }) { + this[kState].readable = readable; + this[kState].writable = writable; + } +} + +ObjectDefineProperties(TransformStream.prototype, { + readable: { enumerable: true }, + writable: { enumerable: true }, +}); + +function TransferedTransformStream() { + return makeTransferable(ReflectConstruct( + function() { + this[kType] = 'TransformStream'; + this[kState] = { + readable: undefined, + writable: undefined, + backpressure: undefined, + backpressureChange: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + controller: undefined, + }; + }, + [], TransformStream)); +} +TransferedTransformStream.prototype[kDeserialize] = () => {}; + +class TransformStreamDefaultController { + [kType] = 'TransformStreamDefaultController'; + + get [SymbolToStringTag]() { return this[kType]; } + + constructor() { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + + 
/** + * @readonly + * @type {number} + */ + get desiredSize() { + if (!isTransformStreamDefaultController(this)) + throw new ERR_INVALID_THIS('TransformStreamDefaultController'); + const { + stream, + } = this[kState]; + const { + readable, + } = stream[kState]; + const { + controller: readableController, + } = readable[kState]; + return readableStreamDefaultControllerGetDesiredSize(readableController); + } + + /** + * @param {any} chunk + */ + enqueue(chunk = undefined) { + if (!isTransformStreamDefaultController(this)) + throw new ERR_INVALID_THIS('TransformStreamDefaultController'); + transformStreamDefaultControllerEnqueue(this, chunk); + } + + /** + * @param {any} reason + */ + error(reason = undefined) { + if (!isTransformStreamDefaultController(this)) + throw new ERR_INVALID_THIS('TransformStreamDefaultController'); + transformStreamDefaultControllerError(this, reason); + } + + terminate() { + if (!isTransformStreamDefaultController(this)) + throw new ERR_INVALID_THIS('TransformStreamDefaultController'); + transformStreamDefaultControllerTerminate(this); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + stream: this[kState].stream, + }); + } +} + +ObjectDefineProperties(TransformStreamDefaultController.prototype, { + desiredSize: { enumerable: true }, + enqueue: { enumerable: true }, + error: { enumerable: true }, + terminate: { enumerable: true }, +}); + +function createTransformStreamDefaultController() { + return ReflectConstruct( + function() { + this[kType] = 'TransformStreamDefaultController'; + }, + [], + TransformStreamDefaultController); +} + +const isTransformStream = + isBrandCheck('TransformStream'); +const isTransformStreamDefaultController = + isBrandCheck('TransformStreamDefaultController'); + +async function defaultTransformAlgorithm(chunk, controller) { + transformStreamDefaultControllerEnqueue(controller, chunk); +} + +function initializeTransformStream( + stream, + startPromise, + 
writableHighWaterMark, + writableSizeAlgorithm, + readableHighWaterMark, + readableSizeAlgorithm) { + + const writable = new WritableStream({ + start() { return startPromise.promise; }, + write(chunk) { + return transformStreamDefaultSinkWriteAlgorithm(stream, chunk); + }, + abort(reason) { + return transformStreamDefaultSinkAbortAlgorithm(stream, reason); + }, + close() { + return transformStreamDefaultSinkCloseAlgorithm(stream); + }, + }, { + highWaterMark: writableHighWaterMark, + size: writableSizeAlgorithm, + }); + + const readable = new ReadableStream({ + start() { return startPromise.promise; }, + pull() { + return transformStreamDefaultSourcePullAlgorithm(stream); + }, + cancel(reason) { + transformStreamErrorWritableAndUnblockWrite(stream, reason); + return PromiseResolve(); + }, + }, { + highWaterMark: readableHighWaterMark, + size: readableSizeAlgorithm, + }); + + stream[kState] = { + readable, + writable, + controller: undefined, + backpressure: undefined, + backpressureChange: { + promise: undefined, + resolve: undefined, + reject: undefined, + } + }; + + transformStreamSetBackpressure(stream, true); +} + +function transformStreamError(stream, error) { + const { + readable, + } = stream[kState]; + const { + controller, + } = readable[kState]; + readableStreamDefaultControllerError(controller, error); + transformStreamErrorWritableAndUnblockWrite(stream, error); +} + +function transformStreamErrorWritableAndUnblockWrite(stream, error) { + const { + controller, + writable, + } = stream[kState]; + transformStreamDefaultControllerClearAlgorithms(controller); + writableStreamDefaultControllerErrorIfNeeded( + writable[kState].controller, + error); + if (stream[kState].backpressure) + transformStreamSetBackpressure(stream, false); +} + +function transformStreamSetBackpressure(stream, backpressure) { + assert(stream[kState].backpressure !== backpressure); + if (stream[kState].backpressureChange.promise !== undefined) + 
stream[kState].backpressureChange.resolve?.(); + stream[kState].backpressureChange = createDeferredPromise(); + stream[kState].backpressure = backpressure; +} + +function setupTransformStreamDefaultController( + stream, + controller, + transformAlgorithm, + flushAlgorithm) { + assert(isTransformStream(stream)); + assert(stream[kState].controller === undefined); + controller[kState] = { + stream, + transformAlgorithm, + flushAlgorithm, + }; + stream[kState].controller = controller; +} + +function setupTransformStreamDefaultControllerFromTransformer( + stream, + transformer) { + const controller = createTransformStreamDefaultController(); + const transform = transformer?.transform || defaultTransformAlgorithm; + const flush = transformer?.flush || nonOpFlush; + const transformAlgorithm = + FunctionPrototypeBind(transform, transformer); + const flushAlgorithm = + FunctionPrototypeBind(flush, transformer); + + setupTransformStreamDefaultController( + stream, + controller, + transformAlgorithm, + flushAlgorithm); +} + +function transformStreamDefaultControllerClearAlgorithms(controller) { + controller[kState].transformAlgorithm = undefined; + controller[kState].flushAlgorithm = undefined; +} + +function transformStreamDefaultControllerEnqueue(controller, chunk) { + const { + stream, + } = controller[kState]; + const { + readable, + } = stream[kState]; + const { + controller: readableController, + } = readable[kState]; + if (!readableStreamDefaultControllerCanCloseOrEnqueue(readableController)) + throw new ERR_INVALID_STATE.TypeError('Unable to enqueue'); + try { + readableStreamDefaultControllerEnqueue(readableController, chunk); + } catch (error) { + transformStreamErrorWritableAndUnblockWrite(stream, error); + throw readable[kState].storedError; + } + const backpressure = + readableStreamDefaultControllerHasBackpressure(readableController); + if (backpressure !== stream[kState].backpressure) { + assert(backpressure); + transformStreamSetBackpressure(stream, true); + } 
+} + +function transformStreamDefaultControllerError(controller, error) { + transformStreamError(controller[kState].stream, error); +} + +function transformStreamDefaultControllerPerformTransform(controller, chunk) { + const transformPromise = + ensureIsPromise( + controller[kState].transformAlgorithm, + controller, + chunk, + controller); + return PromisePrototypeCatch( + transformPromise, + (error) => { + transformStreamError(controller[kState].stream, error); + throw error; + }); +} + +function transformStreamDefaultControllerTerminate(controller) { + const { + stream, + } = controller[kState]; + const { + readable, + } = stream[kState]; + assert(readable !== undefined); + const { + controller: readableController, + } = readable[kState]; + readableStreamDefaultControllerClose(readableController); + transformStreamErrorWritableAndUnblockWrite( + stream, + new ERR_INVALID_STATE.TypeError('TransformStream has been terminated')); +} + +function transformStreamDefaultSinkWriteAlgorithm(stream, chunk) { + const { + writable, + controller, + } = stream[kState]; + assert(writable[kState].state === 'writable'); + if (stream[kState].backpressure) { + const backpressureChange = stream[kState].backpressureChange.promise; + return PromisePrototypeThen( + backpressureChange, + () => { + const { + writable, + } = stream[kState]; + if (writable[kState].state === 'erroring') + throw writable[kState].storedError; + assert(writable[kState].state === 'writable'); + return transformStreamDefaultControllerPerformTransform( + controller, + chunk); + }); + } + return transformStreamDefaultControllerPerformTransform(controller, chunk); +} + +async function transformStreamDefaultSinkAbortAlgorithm(stream, reason) { + transformStreamError(stream, reason); +} + +function transformStreamDefaultSinkCloseAlgorithm(stream) { + const { + readable, + controller, + } = stream[kState]; + + const flushPromise = + ensureIsPromise( + controller[kState].flushAlgorithm, + controller, + controller); + 
transformStreamDefaultControllerClearAlgorithms(controller); + return PromisePrototypeThen( + flushPromise, + () => { + if (readable[kState].state === 'errored') + throw readable[kState].storedError; + readableStreamDefaultControllerClose(readable[kState].controller); + }, + (error) => { + transformStreamError(stream, error); + throw readable[kState].storedError; + }); +} + +function transformStreamDefaultSourcePullAlgorithm(stream) { + assert(stream[kState].backpressure); + assert(stream[kState].backpressureChange.promise !== undefined); + transformStreamSetBackpressure(stream, false); + return stream[kState].backpressureChange.promise; +} + +module.exports = { + TransformStream, + TransformStreamDefaultController, + TransferedTransformStream, + + // Exported Brand Checks + isTransformStream, + isTransformStreamDefaultController, +}; diff --git a/lib/internal/webstreams/util.js b/lib/internal/webstreams/util.js new file mode 100644 index 00000000000000..e0876caf81b944 --- /dev/null +++ b/lib/internal/webstreams/util.js @@ -0,0 +1,237 @@ +'use strict'; + +const { + ArrayBufferPrototype, + ArrayPrototypePush, + ArrayPrototypeShift, + AsyncIteratorPrototype, + FunctionPrototypeCall, + MathMax, + NumberIsNaN, + ObjectCreate, + PromisePrototypeThen, + PromiseResolve, + PromiseReject, + ReflectGet, + Symbol, +} = primordials; + +const { + codes: { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_OPERATION_FAILED, + }, +} = require('internal/errors'); + +const { + copyArrayBuffer, + detachArrayBuffer +} = internalBinding('buffer'); + +const { + isPromise, +} = require('util/types'); + +const { + inspect, +} = require('util'); + +const { + getPromiseDetails, + kPending, +} = internalBinding('util'); + +const assert = require('internal/assert'); + +const kState = Symbol('kState'); +const kType = Symbol('kType'); + +const AsyncIterator = ObjectCreate(AsyncIteratorPrototype, { + next: { + configurable: true, + enumerable: true, + writable: true, + }, + return: { + 
configurable: true, + enumerable: true, + writable: true, + }, +}); + +function extractHighWaterMark(value, defaultHWM) { + if (value === undefined) return defaultHWM; + value = +value; + if (typeof value !== 'number' || + NumberIsNaN(value) || + value < 0) + throw new ERR_INVALID_ARG_VALUE.RangeError('strategy.highWaterMark', value); + return value; +} + +function extractSizeAlgorithm(size) { + if (size === undefined) return () => 1; + if (typeof size !== 'function') + throw new ERR_INVALID_ARG_TYPE('strategy.size', 'Function', size); + return size; +} + +function customInspect(depth, options, name, data) { + if (depth < 0) + return this; + + const opts = { + ...options, + depth: options.depth == null ? null : options.depth - 1 + }; + + return `${name} ${inspect(data, opts)}`; +} + +// These are defensive to work around the possibility that +// the buffer, byteLength, and byteOffset properties on +// ArrayBuffer and ArrayBufferView's may have been tampered with. + +function ArrayBufferViewGetBuffer(view) { + return ReflectGet(view.constructor.prototype, 'buffer', view); +} + +function ArrayBufferViewGetByteLength(view) { + return ReflectGet(view.constructor.prototype, 'byteLength', view); +} + +function ArrayBufferViewGetByteOffset(view) { + return ReflectGet(view.constructor.prototype, 'byteOffset', view); +} + +function ArrayBufferGetByteLength(view) { + return ReflectGet(ArrayBufferPrototype, 'byteLength', view); +} + +function isBrandCheck(brand) { + return (value) => { + return value != null && + value[kState] !== undefined && + value[kType] === brand; + }; +} + +function transferArrayBuffer(buffer) { + const res = detachArrayBuffer(buffer); + if (res === undefined) { + throw new ERR_OPERATION_FAILED.TypeError( + 'The ArrayBuffer could not be transfered'); + } + return res; +} + +function dequeueValue(controller) { + assert(controller[kState].queue !== undefined); + assert(controller[kState].queueTotalSize !== undefined); + 
assert(controller[kState].queue.length); + const { + value, + size, + } = ArrayPrototypeShift(controller[kState].queue); + controller[kState].queueTotalSize = + MathMax(0, controller[kState].queueTotalSize - size); + return value; +} + +function resetQueue(controller) { + assert(controller[kState].queue !== undefined); + assert(controller[kState].queueTotalSize !== undefined); + controller[kState].queue = []; + controller[kState].queueTotalSize = 0; +} + +function peekQueueValue(controller) { + assert(controller[kState].queue !== undefined); + assert(controller[kState].queueTotalSize !== undefined); + assert(controller[kState].queue.length); + return controller[kState].queue[0].value; +} + +function enqueueValueWithSize(controller, value, size) { + assert(controller[kState].queue !== undefined); + assert(controller[kState].queueTotalSize !== undefined); + size = +size; + if (typeof size !== 'number' || + size < 0 || + NumberIsNaN(size) || + size === Infinity) { + throw new ERR_INVALID_ARG_VALUE.RangeError('size', size); + } + ArrayPrototypePush(controller[kState].queue, { value, size }); + controller[kState].queueTotalSize += size; +} + +function ensureIsPromise(fn, thisArg, ...args) { + try { + const value = FunctionPrototypeCall(fn, thisArg, ...args); + return isPromise(value) ? value : PromiseResolve(value); + } catch (error) { + return PromiseReject(error); + } +} + +function isPromisePending(promise) { + if (promise === undefined) return false; + const details = getPromiseDetails(promise); + return details?.[0] === kPending; +} + +function setPromiseHandled(promise) { + // Alternatively, we could use the native API + // MarkAsHandled, but this avoids the extra boundary cross + // and is hopefully faster at the cost of an extra Promise + // allocation. 
+ PromisePrototypeThen(promise, () => {}, () => {}); +} + +async function nonOpFlush() {} + +function nonOpStart() {} + +async function nonOpPull() {} + +async function nonOpCancel() {} + +async function nonOpWrite() {} + +let transfer; +function lazyTransfer() { + if (transfer === undefined) + transfer = require('internal/webstreams/transfer'); + return transfer; +} + +module.exports = { + ArrayBufferViewGetBuffer, + ArrayBufferViewGetByteLength, + ArrayBufferViewGetByteOffset, + ArrayBufferGetByteLength, + AsyncIterator, + copyArrayBuffer, + customInspect, + dequeueValue, + ensureIsPromise, + enqueueValueWithSize, + extractHighWaterMark, + extractSizeAlgorithm, + lazyTransfer, + isBrandCheck, + isPromisePending, + peekQueueValue, + resetQueue, + setPromiseHandled, + transferArrayBuffer, + nonOpCancel, + nonOpFlush, + nonOpPull, + nonOpStart, + nonOpWrite, + kType, + kState, +}; diff --git a/lib/internal/webstreams/writablestream.js b/lib/internal/webstreams/writablestream.js new file mode 100644 index 00000000000000..793ae9e9ad8fb4 --- /dev/null +++ b/lib/internal/webstreams/writablestream.js @@ -0,0 +1,1329 @@ +'use strict'; + +/* eslint-disable no-use-before-define */ + +const { + ArrayPrototypePush, + ArrayPrototypeShift, + FunctionPrototypeBind, + FunctionPrototypeCall, + ObjectDefineProperties, + PromisePrototypeThen, + PromiseResolve, + PromiseReject, + ReflectConstruct, + Symbol, + SymbolToStringTag, +} = primordials; + +const { + codes: { + ERR_ILLEGAL_CONSTRUCTOR, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_ARG_TYPE, + ERR_INVALID_STATE, + ERR_INVALID_THIS, + }, +} = require('internal/errors'); + +const { + DOMException, +} = internalBinding('messaging'); + +const { + createDeferredPromise, + customInspectSymbol: kInspect, +} = require('internal/util'); + +const { + MessageChannel, +} = require('internal/worker/io'); + +const { + kDeserialize, + kTransfer, + kTransferList, + makeTransferable, +} = require('internal/worker/js_transferable'); + +const { + 
customInspect, + dequeueValue, + ensureIsPromise, + enqueueValueWithSize, + extractHighWaterMark, + extractSizeAlgorithm, + lazyTransfer, + isBrandCheck, + isPromisePending, + peekQueueValue, + resetQueue, + setPromiseHandled, + nonOpCancel, + nonOpStart, + nonOpWrite, + kType, + kState, +} = require('internal/webstreams/util'); + +const { + AbortController, +} = require('internal/abort_controller'); + +const assert = require('internal/assert'); + +const kAbort = Symbol('kAbort'); +const kCloseSentinel = Symbol('kCloseSentinel'); +const kError = Symbol('kError'); + +/** + * @typedef {import('../abort_controller').AbortSignal} AbortSignal + * @typedef {import('./queuingstrategies').QueuingStrategy + * } QueuingStrategy + * @typedef {import('./queuingstrategies').QueuingStrategySize + * } QueuingStrategySize + * + * @callback UnderlyingSinkStartCallback + * @param {WritableStreamDefaultController} controller + * + * @callback UnderlyingSinkWriteCallback + * @param {any} chunk + * @param {WritableStreamDefaultController} controller + * @returns {Promise} + * + * @callback UnderlyingSinkCloseCallback + * @returns {Promise} + * + * @callback UnderlyingSinkAbortCallback + * @param {any} reason + * @returns {Promise} + * + * @typedef {{ + * start? : UnderlyingSinkStartCallback, + * write? : UnderlyingSinkWriteCallback, + * close? : UnderlyingSinkCloseCallback, + * abort? : UnderlyingSinkAbortCallback, + * type? 
: any, + * }} UnderlyingSink + */ + +class WritableStream { + [kType] = 'WritableStream'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {UnderlyingSink} [sink] + * @param {QueuingStrategy} [strategy] + */ + constructor(sink = null, strategy = {}) { + const type = sink?.type; + if (type !== undefined) + throw new ERR_INVALID_ARG_VALUE.RangeError('type', type); + + this[kState] = { + close: createDeferredPromise(), + closeRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + inFlightWriteRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + inFlightCloseRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + pendingAbortRequest: { + abort: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + reason: undefined, + wasAlreadyErroring: false, + }, + backpressure: false, + controller: undefined, + state: 'writable', + storedError: undefined, + writeRequests: [], + writer: undefined, + transfer: { + readable: undefined, + port1: undefined, + port2: undefined, + promise: undefined, + } + }; + + const size = extractSizeAlgorithm(strategy?.size); + const highWaterMark = extractHighWaterMark(strategy?.highWaterMark, 1); + + setupWritableStreamDefaultControllerFromSink( + this, + sink, + highWaterMark, + size); + + // eslint-disable-next-line no-constructor-return + return makeTransferable(this); + } + + /** + * @readonly + * @type {boolean} + */ + get locked() { + if (!isWritableStream(this)) + throw new ERR_INVALID_THIS('WritableStream'); + return isWritableStreamLocked(this); + } + + /** + * @param {any} reason + * @returns {Promise} + */ + abort(reason = undefined) { + if (!isWritableStream(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStream')); + if (isWritableStreamLocked(this)) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('WritableStream is locked')); + } + return writableStreamAbort(this, reason); + } + + /** 
+ * @returns {Promise} + */ + close() { + if (!isWritableStream(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStream')); + if (isWritableStreamLocked(this)) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('WritableStream is locked')); + } + if (writableStreamCloseQueuedOrInFlight(this)) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('Failure closing WritableStream')); + } + return writableStreamClose(this); + } + + /** + * @returns {WritableStreamDefaultWriter} + */ + getWriter() { + if (!isWritableStream(this)) + throw new ERR_INVALID_THIS('WritableStream'); + return new WritableStreamDefaultWriter(this); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + locked: this.locked, + state: this[kState].state, + }); + } + + [kTransfer]() { + if (!isWritableStream(this)) + throw new ERR_INVALID_THIS('WritableStream'); + if (this.locked) { + this[kState].transfer.port1?.close(); + this[kState].transfer.port1 = undefined; + this[kState].transfer.port2 = undefined; + throw new DOMException( + 'Cannot transfer a locked WritableStream', + 'DataCloneError'); + } + + const { + readable, + promise, + } = lazyTransfer().newCrossRealmReadableStream( + this, + this[kState].transfer.port1); + + this[kState].transfer.readable = readable; + this[kState].transfer.promise = promise; + + setPromiseHandled(this[kState].transfer.promise); + + return { + data: { port: this[kState].transfer.port2 }, + deserializeInfo: + 'internal/webstreams/writablestream:TransferedWritableStream' + }; + } + + [kTransferList]() { + const { port1, port2 } = new MessageChannel(); + this[kState].transfer.port1 = port1; + this[kState].transfer.port2 = port2; + return [ port2 ]; + } + + [kDeserialize]({ port }) { + const transfer = lazyTransfer(); + setupWritableStreamDefaultControllerFromSink( + this, + new transfer.CrossRealmTransformWritableSink(port), + 1, + () => 1); + } +} + +ObjectDefineProperties(WritableStream.prototype, { + 
locked: { enumerable: true }, + abort: { enumerable: true }, + close: { enumerable: true }, + getWriter: { enumerable: true }, +}); + +function TransferedWritableStream() { + return makeTransferable(ReflectConstruct( + function() { + this[kType] = 'WritableStream'; + this[kState] = { + close: createDeferredPromise(), + closeRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + inFlightWriteRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + inFlightCloseRequest: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + pendingAbortRequest: { + abort: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + reason: undefined, + wasAlreadyErroring: false, + }, + backpressure: false, + controller: undefined, + state: 'writable', + storedError: undefined, + writeRequests: [], + writer: undefined, + transfer: { + promise: undefined, + port1: undefined, + port2: undefined, + readable: undefined, + }, + }; + }, + [], WritableStream)); +} +TransferedWritableStream.prototype[kDeserialize] = () => {}; + +class WritableStreamDefaultWriter { + [kType] = 'WritableStreamDefaultWriter'; + + get [SymbolToStringTag]() { return this[kType]; } + + /** + * @param {WritableStream} stream + */ + constructor(stream) { + if (!isWritableStream(stream)) + throw new ERR_INVALID_ARG_TYPE('stream', 'WritableStream', stream); + this[kState] = { + stream: undefined, + close: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + ready: { + promise: undefined, + resolve: undefined, + reject: undefined, + } + }; + setupWritableStreamDefaultWriter(this, stream); + } + + /** + * @readonly + * @type {Promise} + */ + get closed() { + if (!isWritableStreamDefaultWriter(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStreamDefaultWriter')); + return this[kState].close.promise; + } + + /** + * @readonly + * @type {number} + */ + get desiredSize() { + if 
(!isWritableStreamDefaultWriter(this)) + throw new ERR_INVALID_THIS('WritableStreamDefaultWriter'); + if (this[kState].stream === undefined) { + throw new ERR_INVALID_STATE.TypeError( + 'Writer is not bound to a WritableStream'); + } + return writableStreamDefaultWriterGetDesiredSize(this); + } + + /** + * @readonly + * @type {Promise} + */ + get ready() { + if (!isWritableStreamDefaultWriter(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStreamDefaultWriter')); + return this[kState].ready.promise; + } + + /** + * @param {any} reason + * @returns {Promise} + */ + abort(reason = undefined) { + if (!isWritableStreamDefaultWriter(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStreamDefaultWriter')); + if (this[kState].stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'Writer is not bound to a WritableStream')); + } + return writableStreamDefaultWriterAbort(this, reason); + } + + /** + * @returns {Promise} + */ + close() { + if (!isWritableStreamDefaultWriter(this)) + return PromiseReject(new ERR_INVALID_THIS('WritableStreamDefaultWriter')); + const { + stream, + } = this[kState]; + if (stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'Writer is not bound to a WritableStream')); + } + if (writableStreamCloseQueuedOrInFlight(stream)) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('Failure to close WritableStream')); + } + return writableStreamDefaultWriterClose(this); + } + + releaseLock() { + if (!isWritableStreamDefaultWriter(this)) + throw new ERR_INVALID_THIS('WritableStreamDefaultWriter'); + const { + stream, + } = this[kState]; + if (stream === undefined) + return; + assert(stream[kState].writer !== undefined); + writableStreamDefaultWriterRelease(this); + } + + /** + * @param {any} chunk + * @returns {Promise} + */ + write(chunk = undefined) { + if (!isWritableStreamDefaultWriter(this)) + return PromiseReject(new 
ERR_INVALID_THIS('WritableStreamDefaultWriter')); + if (this[kState].stream === undefined) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError( + 'Writer is not bound to a WritableStream')); + } + return writableStreamDefaultWriterWrite(this, chunk); + } + + [kInspect](depth, options) { + return customInspect(depth, options, this[kType], { + stream: this[kState].stream, + close: this[kState].close.promise, + ready: this[kState].ready.promise, + desiredSize: this.desiredSize, + }); + } +} + +ObjectDefineProperties(WritableStreamDefaultWriter.prototype, { + closed: { enumerable: true }, + ready: { enumerable: true }, + desiredSize: { enumerable: true }, + abort: { enumerable: true }, + close: { enumerable: true }, + releaseLock: { enumerable: true }, + write: { enumerable: true }, +}); + +class WritableStreamDefaultController { + [kType] = 'WritableStreamDefaultController'; + + get [SymbolToStringTag]() { return this[kType]; } + + constructor() { + throw new ERR_ILLEGAL_CONSTRUCTOR(); + } + + [kAbort](reason) { + const result = this[kState].abortAlgorithm(reason); + writableStreamDefaultControllerClearAlgorithms(this); + return result; + } + + [kError]() { + resetQueue(this); + } + + /** + * @type {any} + */ + get abortReason() { + if (!isWritableStreamDefaultController(this)) + throw new ERR_INVALID_THIS('WritableStreamDefaultController'); + return this[kState].abortReason; + } + + /** + * @type {AbortSignal} + */ + get signal() { + if (!isWritableStreamDefaultController(this)) + throw new ERR_INVALID_THIS('WritableStreamDefaultController'); + return this[kState].abortController.signal; + } + + /** + * @param {any} error + */ + error(error = undefined) { + if (!isWritableStreamDefaultController(this)) + throw new ERR_INVALID_THIS('WritableStreamDefaultController'); + if (this[kState].stream[kState].state !== 'writable') + return; + writableStreamDefaultControllerError(this, error); + } + + [kInspect](depth, options) { + return customInspect(depth, options, 
this[kType], { + stream: this[kState].stream, + }); + } +} + +ObjectDefineProperties(WritableStreamDefaultController.prototype, { + abortReason: { enumerable: true }, + signal: { enumerable: true }, + error: { enumerable: true }, +}); + +function createWritableStreamDefaultController() { + return ReflectConstruct( + function() { + this[kType] = 'WritableStreamDefaultController'; + }, + [], WritableStreamDefaultController); +} + +const isWritableStream = + isBrandCheck('WritableStream'); +const isWritableStreamDefaultWriter = + isBrandCheck('WritableStreamDefaultWriter'); +const isWritableStreamDefaultController = + isBrandCheck('WritableStreamDefaultController'); + +function isWritableStreamLocked(stream) { + return stream[kState].writer !== undefined; +} + +function setupWritableStreamDefaultWriter(writer, stream) { + if (isWritableStreamLocked(stream)) + throw new ERR_INVALID_STATE.TypeError('WritableStream is locked'); + writer[kState].stream = stream; + stream[kState].writer = writer; + switch (stream[kState].state) { + case 'writable': + if (!writableStreamCloseQueuedOrInFlight(stream) && + stream[kState].backpressure) { + writer[kState].ready = createDeferredPromise(); + } else { + writer[kState].ready = { + promise: PromiseResolve(), + resolve: undefined, + reject: undefined, + }; + } + setClosedPromiseToNewPromise(); + break; + case 'erroring': + writer[kState].ready = { + promise: PromiseReject(stream[kState].storedError), + resolve: undefined, + reject: undefined, + }; + setPromiseHandled(writer[kState].ready.promise); + setClosedPromiseToNewPromise(); + break; + case 'closed': + writer[kState].ready = { + promise: PromiseResolve(), + resolve: undefined, + reject: undefined, + }; + writer[kState].close = { + promise: PromiseResolve(), + resolve: undefined, + reject: undefined, + }; + break; + default: + writer[kState].ready = { + promise: PromiseReject(stream[kState].storedError), + resolve: undefined, + reject: undefined, + }; + writer[kState].close = { 
+ promise: PromiseReject(stream[kState].storedError), + resolve: undefined, + reject: undefined, + }; + setPromiseHandled(writer[kState].ready.promise); + setPromiseHandled(writer[kState].close.promise); + } + + function setClosedPromiseToNewPromise() { + writer[kState].close = createDeferredPromise(); + } +} + +function writableStreamAbort(stream, reason) { + const { + state, + controller, + } = stream[kState]; + if (state === 'closed' || state === 'errored') + return PromiseResolve(); + + controller[kState].abortReason = reason; + controller[kState].abortController.abort(); + + if (stream[kState].pendingAbortRequest.abort.promise !== undefined) + return stream[kState].pendingAbortRequest.abort.promise; + + assert(state === 'writable' || state === 'erroring'); + + let wasAlreadyErroring = false; + if (state === 'erroring') { + wasAlreadyErroring = true; + reason = undefined; + } + + const abort = createDeferredPromise(); + + stream[kState].pendingAbortRequest = { + abort, + reason, + wasAlreadyErroring, + }; + + if (!wasAlreadyErroring) + writableStreamStartErroring(stream, reason); + + return abort.promise; +} + +function writableStreamClose(stream) { + const { + state, + writer, + backpressure, + controller, + } = stream[kState]; + if (state === 'closed' || state === 'errored') { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('WritableStream is closed')); + } + assert(state === 'writable' || state === 'erroring'); + assert(!writableStreamCloseQueuedOrInFlight(stream)); + stream[kState].closeRequest = createDeferredPromise(); + const { promise } = stream[kState].closeRequest; + if (writer !== undefined && backpressure && state === 'writable') + writer[kState].ready.resolve?.(); + writableStreamDefaultControllerClose(controller); + return promise; +} + +function writableStreamUpdateBackpressure(stream, backpressure) { + assert(stream[kState].state === 'writable'); + assert(!writableStreamCloseQueuedOrInFlight(stream)); + const { + writer, + } = 
stream[kState]; + if (writer !== undefined && stream[kState].backpressure !== backpressure) { + if (backpressure) { + writer[kState].ready = createDeferredPromise(); + } else { + writer[kState].ready.resolve?.(); + } + } + stream[kState].backpressure = backpressure; +} + +function writableStreamStartErroring(stream, reason) { + assert(stream[kState].storedError === undefined); + assert(stream[kState].state === 'writable'); + const { + controller, + writer, + } = stream[kState]; + assert(controller !== undefined); + stream[kState].state = 'erroring'; + stream[kState].storedError = reason; + if (writer !== undefined) { + writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason); + } + if (!writableStreamHasOperationMarkedInFlight(stream) && + controller[kState].started) { + writableStreamFinishErroring(stream); + } +} + +function writableStreamRejectCloseAndClosedPromiseIfNeeded(stream) { + assert(stream[kState].state === 'errored'); + if (stream[kState].closeRequest.promise !== undefined) { + assert(stream[kState].inFlightCloseRequest.promise === undefined); + stream[kState].closeRequest.reject?.(stream[kState].storedError); + stream[kState].closeRequest = { + promise: undefined, + reject: undefined, + resolve: undefined, + }; + } + const { + writer, + } = stream[kState]; + if (writer !== undefined) { + writer[kState].close.reject?.(stream[kState].storedError); + setPromiseHandled(writer[kState].close.promise); + } +} + +function writableStreamMarkFirstWriteRequestInFlight(stream) { + assert(stream[kState].inFlightWriteRequest.promise === undefined); + assert(stream[kState].writeRequests.length); + const writeRequest = ArrayPrototypeShift(stream[kState].writeRequests); + stream[kState].inFlightWriteRequest = writeRequest; +} + +function writableStreamMarkCloseRequestInFlight(stream) { + assert(stream[kState].inFlightWriteRequest.promise === undefined); + assert(stream[kState].closeRequest.promise !== undefined); + stream[kState].inFlightCloseRequest = 
stream[kState].closeRequest; + stream[kState].closeRequest = { + promise: undefined, + resolve: undefined, + reject: undefined, + }; +} + +function writableStreamHasOperationMarkedInFlight(stream) { + const { + inFlightWriteRequest, + inFlightCloseRequest, + } = stream[kState]; + if (inFlightWriteRequest.promise === undefined && + inFlightCloseRequest.promise === undefined) { + return false; + } + return true; +} + +function writableStreamFinishInFlightWriteWithError(stream, error) { + assert(stream[kState].inFlightWriteRequest.promise !== undefined); + stream[kState].inFlightWriteRequest.reject?.(error); + stream[kState].inFlightWriteRequest = { + promise: undefined, + resolve: undefined, + reject: undefined, + }; + assert(stream[kState].state === 'writable' || + stream[kState].state === 'erroring'); + writableStreamDealWithRejection(stream, error); +} + +function writableStreamFinishInFlightWrite(stream) { + assert(stream[kState].inFlightWriteRequest.promise !== undefined); + stream[kState].inFlightWriteRequest.resolve?.(); + stream[kState].inFlightWriteRequest = { + promise: undefined, + resolve: undefined, + reject: undefined, + }; +} + +function writableStreamFinishInFlightCloseWithError(stream, error) { + assert(stream[kState].inFlightCloseRequest.promise !== undefined); + stream[kState].inFlightCloseRequest.reject?.(error); + stream[kState].inFlightCloseRequest = { + promise: undefined, + resolve: undefined, + reject: undefined, + }; + assert(stream[kState].state === 'writable' || + stream[kState].state === 'erroring'); + if (stream[kState].pendingAbortRequest.abort.promise !== undefined) { + stream[kState].pendingAbortRequest.abort.reject?.(error); + stream[kState].pendingAbortRequest = { + abort: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + reason: undefined, + wasAlreadyErroring: false, + }; + } + writableStreamDealWithRejection(stream, error); +} + +function writableStreamFinishInFlightClose(stream) { + 
assert(stream[kState].inFlightCloseRequest.promise !== undefined); + stream[kState].inFlightCloseRequest.resolve?.(); + stream[kState].inFlightCloseRequest = { + promise: undefined, + resolve: undefined, + reject: undefined, + }; + if (stream[kState].state === 'erroring') { + stream[kState].storedError = undefined; + if (stream[kState].pendingAbortRequest.abort.promise !== undefined) { + stream[kState].pendingAbortRequest.abort.resolve?.(); + stream[kState].pendingAbortRequest = { + abort: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + reason: undefined, + wasAlreadyErroring: false, + }; + } + } + stream[kState].state = 'closed'; + if (stream[kState].writer !== undefined) + stream[kState].writer[kState].close.resolve?.(); + assert(stream[kState].pendingAbortRequest.abort.promise === undefined); + assert(stream[kState].storedError === undefined); +} + +function writableStreamFinishErroring(stream) { + assert(stream[kState].state === 'erroring'); + assert(!writableStreamHasOperationMarkedInFlight(stream)); + stream[kState].state = 'errored'; + stream[kState].controller[kError](); + const storedError = stream[kState].storedError; + for (let n = 0; n < stream[kState].writeRequests.length; n++) + stream[kState].writeRequests[n].reject?.(storedError); + stream[kState].writeRequests = []; + + if (stream[kState].pendingAbortRequest.abort.promise === undefined) { + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + return; + } + + const abortRequest = stream[kState].pendingAbortRequest; + stream[kState].pendingAbortRequest = { + abort: { + promise: undefined, + resolve: undefined, + reject: undefined, + }, + reason: undefined, + wasAlreadyErroring: false, + }; + if (abortRequest.wasAlreadyErroring) { + abortRequest.abort.reject?.(storedError); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + return; + } + PromisePrototypeThen( + ensureIsPromise( + stream[kState].controller[kAbort], + stream[kState].controller, + 
abortRequest.reason), + () => { + abortRequest.abort.resolve?.(); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + }, + (error) => { + abortRequest.abort.reject?.(error); + writableStreamRejectCloseAndClosedPromiseIfNeeded(stream); + }); +} + +function writableStreamDealWithRejection(stream, error) { + const { + state, + } = stream[kState]; + if (state === 'writable') { + writableStreamStartErroring(stream, error); + return; + } + + assert(state === 'erroring'); + writableStreamFinishErroring(stream); +} + +function writableStreamCloseQueuedOrInFlight(stream) { + if (stream[kState].closeRequest.promise === undefined && + stream[kState].inFlightCloseRequest.promise === undefined) { + return false; + } + return true; +} + +function writableStreamAddWriteRequest(stream) { + assert(isWritableStreamLocked(stream)); + assert(stream[kState].state === 'writable'); + const { + promise, + resolve, + reject, + } = createDeferredPromise(); + ArrayPrototypePush( + stream[kState].writeRequests, + { + promise, + resolve, + reject, + }); + return promise; +} + +function writableStreamDefaultWriterWrite(writer, chunk) { + const { + stream, + } = writer[kState]; + assert(stream !== undefined); + const { + controller, + } = stream[kState]; + const chunkSize = writableStreamDefaultControllerGetChunkSize( + controller, + chunk); + if (stream !== writer[kState].stream) { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('Mismatched WritableStreams')); + } + const { + state, + } = stream[kState]; + + if (state === 'errored') + return PromiseReject(stream[kState].storedError); + + if (writableStreamCloseQueuedOrInFlight(stream) || state === 'closed') { + return PromiseReject( + new ERR_INVALID_STATE.TypeError('WritableStream is closed')); + } + + if (state === 'erroring') + return PromiseReject(stream[kState].storedError); + + assert(state === 'writable'); + + const promise = writableStreamAddWriteRequest(stream); + writableStreamDefaultControllerWrite(controller, 
chunk, chunkSize); + return promise; +} + +function writableStreamDefaultWriterRelease(writer) { + const { + stream, + } = writer[kState]; + assert(stream !== undefined); + assert(stream[kState].writer === writer); + const releasedError = + new ERR_INVALID_STATE.TypeError('Writer has been released'); + writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, releasedError); + writableStreamDefaultWriterEnsureClosedPromiseRejected(writer, releasedError); + stream[kState].writer = undefined; + writer[kState].stream = undefined; +} + +function writableStreamDefaultWriterGetDesiredSize(writer) { + const { + stream, + } = writer[kState]; + switch (stream[kState].state) { + case 'errored': + // Fall through + case 'erroring': + return null; + case 'closed': + return 0; + } + return writableStreamDefaultControllerGetDesiredSize( + stream[kState].controller); +} + +function writableStreamDefaultWriterEnsureReadyPromiseRejected(writer, error) { + if (isPromisePending(writer[kState].ready.promise)) { + writer[kState].ready.reject?.(error); + } else { + writer[kState].ready = { + promise: PromiseReject(error), + resolve: undefined, + reject: undefined, + }; + } + setPromiseHandled(writer[kState].ready.promise); +} + +function writableStreamDefaultWriterEnsureClosedPromiseRejected(writer, error) { + if (isPromisePending(writer[kState].close.promise)) { + writer[kState].close.reject?.(error); + } else { + writer[kState].close = { + promise: PromiseReject(error), + resolve: undefined, + reject: undefined, + }; + } + setPromiseHandled(writer[kState].close.promise); +} + +function writableStreamDefaultWriterCloseWithErrorPropagation(writer) { + const { + stream, + } = writer[kState]; + assert(stream !== undefined); + const { + state, + } = stream[kState]; + if (writableStreamCloseQueuedOrInFlight(stream) || state === 'closed') + return PromiseResolve(); + + if (state === 'errored') + return PromiseReject(stream[kState].storedError); + + assert(state === 'writable' || state 
=== 'erroring'); + + return writableStreamDefaultWriterClose(writer); +} + +function writableStreamDefaultWriterClose(writer) { + const { + stream, + } = writer[kState]; + assert(stream !== undefined); + return writableStreamClose(stream); +} + +function writableStreamDefaultWriterAbort(writer, reason) { + const { + stream, + } = writer[kState]; + assert(stream !== undefined); + return writableStreamAbort(stream, reason); +} + +function writableStreamDefaultControllerWrite(controller, chunk, chunkSize) { + try { + enqueueValueWithSize(controller, chunk, chunkSize); + } catch (error) { + writableStreamDefaultControllerErrorIfNeeded(controller, error); + return; + } + const { + stream, + } = controller[kState]; + if (!writableStreamCloseQueuedOrInFlight(stream) && + stream[kState].state === 'writable') { + writableStreamUpdateBackpressure( + stream, + writableStreamDefaultControllerGetBackpressure(controller)); + } + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); +} + +function writableStreamDefaultControllerProcessWrite(controller, chunk) { + const { + stream, + writeAlgorithm, + } = controller[kState]; + writableStreamMarkFirstWriteRequestInFlight(stream); + + PromisePrototypeThen( + ensureIsPromise(writeAlgorithm, controller, chunk, controller), + () => { + writableStreamFinishInFlightWrite(stream); + const { + state, + } = stream[kState]; + assert(state === 'writable' || state === 'erroring'); + dequeueValue(controller); + if (!writableStreamCloseQueuedOrInFlight(stream) && + state === 'writable') { + writableStreamUpdateBackpressure( + stream, + writableStreamDefaultControllerGetBackpressure(controller)); + } + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + }, + (error) => { + if (stream[kState].state === 'writable') + writableStreamDefaultControllerClearAlgorithms(controller); + writableStreamFinishInFlightWriteWithError(stream, error); + }); + +} + +function writableStreamDefaultControllerProcessClose(controller) { + const 
{ + closeAlgorithm, + queue, + stream, + } = controller[kState]; + writableStreamMarkCloseRequestInFlight(stream); + dequeueValue(controller); + assert(!queue.length); + const sinkClosePromise = ensureIsPromise(closeAlgorithm, controller); + writableStreamDefaultControllerClearAlgorithms(controller); + PromisePrototypeThen( + sinkClosePromise, + () => writableStreamFinishInFlightClose(stream), + (error) => writableStreamFinishInFlightCloseWithError(stream, error)); +} + +function writableStreamDefaultControllerGetDesiredSize(controller) { + const { + highWaterMark, + queueTotalSize, + } = controller[kState]; + return highWaterMark - queueTotalSize; +} + +function writableStreamDefaultControllerGetChunkSize(controller, chunk) { + try { + return FunctionPrototypeCall( + controller[kState].sizeAlgorithm, + undefined, + chunk); + } catch (error) { + writableStreamDefaultControllerErrorIfNeeded(controller, error); + return 1; + } +} + +function writableStreamDefaultControllerErrorIfNeeded(controller, error) { + const { + stream, + } = controller[kState]; + if (stream[kState].state === 'writable') + writableStreamDefaultControllerError(controller, error); +} + +function writableStreamDefaultControllerError(controller, error) { + const { + stream, + } = controller[kState]; + assert(stream[kState].state === 'writable'); + writableStreamDefaultControllerClearAlgorithms(controller); + writableStreamStartErroring(stream, error); +} + +function writableStreamDefaultControllerClose(controller) { + enqueueValueWithSize(controller, kCloseSentinel, 0); + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); +} + +function writableStreamDefaultControllerClearAlgorithms(controller) { + controller[kState].writeAlgorithm = undefined; + controller[kState].closeAlgorithm = undefined; + controller[kState].abortAlgorithm = undefined; + controller[kState].sizeAlgorithm = undefined; +} + +function writableStreamDefaultControllerGetBackpressure(controller) { + return 
writableStreamDefaultControllerGetDesiredSize(controller) <= 0; +} + +function writableStreamDefaultControllerAdvanceQueueIfNeeded(controller) { + const { + queue, + started, + stream, + } = controller[kState]; + if (!started || stream[kState].inFlightWriteRequest.promise !== undefined) + return; + + if (stream[kState].state === 'erroring') { + writableStreamFinishErroring(stream); + return; + } + + if (!queue.length) + return; + + const value = peekQueueValue(controller); + if (value === kCloseSentinel) + writableStreamDefaultControllerProcessClose(controller); + else + writableStreamDefaultControllerProcessWrite(controller, value); +} + +function setupWritableStreamDefaultControllerFromSink( + stream, + sink, + highWaterMark, + sizeAlgorithm) { + const controller = createWritableStreamDefaultController(); + const start = sink?.start; + const write = sink?.write; + const close = sink?.close; + const abort = sink?.abort; + const startAlgorithm = start ? + FunctionPrototypeBind(start, sink, controller) : + nonOpStart; + const writeAlgorithm = write ? + FunctionPrototypeBind(write, sink) : + nonOpWrite; + const closeAlgorithm = close ? + FunctionPrototypeBind(close, sink) : nonOpCancel; + const abortAlgorithm = abort ? 
+ FunctionPrototypeBind(abort, sink) : nonOpCancel; + setupWritableStreamDefaultController( + stream, + controller, + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark, + sizeAlgorithm); +} + +function setupWritableStreamDefaultController( + stream, + controller, + startAlgorithm, + writeAlgorithm, + closeAlgorithm, + abortAlgorithm, + highWaterMark, + sizeAlgorithm) { + assert(isWritableStream(stream)); + assert(stream[kState].controller === undefined); + controller[kState] = { + abortAlgorithm, + abortReason: undefined, + closeAlgorithm, + highWaterMark, + queue: [], + queueTotalSize: 0, + abortController: new AbortController(), + sizeAlgorithm, + started: false, + stream, + writeAlgorithm, + }; + stream[kState].controller = controller; + + writableStreamUpdateBackpressure( + stream, + writableStreamDefaultControllerGetBackpressure(controller)); + + const startResult = startAlgorithm(); + + PromisePrototypeThen( + PromiseResolve(startResult), + () => { + assert(stream[kState].state === 'writable' || + stream[kState].state === 'erroring'); + controller[kState].started = true; + writableStreamDefaultControllerAdvanceQueueIfNeeded(controller); + }, + (error) => { + assert(stream[kState].state === 'writable' || + stream[kState].state === 'erroring'); + controller[kState].started = true; + writableStreamDealWithRejection(stream, error); + }); +} + +module.exports = { + WritableStream, + WritableStreamDefaultWriter, + WritableStreamDefaultController, + TransferedWritableStream, + + // Exported Brand Checks + isWritableStream, + isWritableStreamDefaultController, + isWritableStreamDefaultWriter, + + isWritableStreamLocked, + setupWritableStreamDefaultWriter, + writableStreamAbort, + writableStreamClose, + writableStreamUpdateBackpressure, + writableStreamStartErroring, + writableStreamRejectCloseAndClosedPromiseIfNeeded, + writableStreamMarkFirstWriteRequestInFlight, + writableStreamMarkCloseRequestInFlight, + 
writableStreamHasOperationMarkedInFlight, + writableStreamFinishInFlightWriteWithError, + writableStreamFinishInFlightWrite, + writableStreamFinishInFlightCloseWithError, + writableStreamFinishInFlightClose, + writableStreamFinishErroring, + writableStreamDealWithRejection, + writableStreamCloseQueuedOrInFlight, + writableStreamAddWriteRequest, + writableStreamDefaultWriterWrite, + writableStreamDefaultWriterRelease, + writableStreamDefaultWriterGetDesiredSize, + writableStreamDefaultWriterEnsureReadyPromiseRejected, + writableStreamDefaultWriterEnsureClosedPromiseRejected, + writableStreamDefaultWriterCloseWithErrorPropagation, + writableStreamDefaultWriterClose, + writableStreamDefaultWriterAbort, + writableStreamDefaultControllerWrite, + writableStreamDefaultControllerProcessWrite, + writableStreamDefaultControllerProcessClose, + writableStreamDefaultControllerGetDesiredSize, + writableStreamDefaultControllerGetChunkSize, + writableStreamDefaultControllerErrorIfNeeded, + writableStreamDefaultControllerError, + writableStreamDefaultControllerClose, + writableStreamDefaultControllerClearAlgorithms, + writableStreamDefaultControllerGetBackpressure, + writableStreamDefaultControllerAdvanceQueueIfNeeded, + setupWritableStreamDefaultControllerFromSink, + setupWritableStreamDefaultController, +}; + +/* eslint-enable no-use-before-define */ diff --git a/lib/stream/web.js b/lib/stream/web.js new file mode 100644 index 00000000000000..929abd19044458 --- /dev/null +++ b/lib/stream/web.js @@ -0,0 +1,48 @@ +'use strict'; + +const { + emitExperimentalWarning, +} = require('internal/util'); + +emitExperimentalWarning('stream/web'); + +const { + TransformStream, + TransformStreamDefaultController, +} = require('internal/webstreams/transformstream'); + +const { + WritableStream, + WritableStreamDefaultController, + WritableStreamDefaultWriter, +} = require('internal/webstreams/writablestream'); + +const { + ReadableStream, + ReadableStreamDefaultReader, + 
ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + ReadableByteStreamController, + ReadableStreamDefaultController, +} = require('internal/webstreams/readablestream'); + +const { + ByteLengthQueuingStrategy, + CountQueuingStrategy, +} = require('internal/webstreams/queuingstrategies'); + +module.exports = { + ReadableStream, + ReadableStreamDefaultReader, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + ReadableByteStreamController, + ReadableStreamDefaultController, + TransformStream, + TransformStreamDefaultController, + WritableStream, + WritableStreamDefaultWriter, + WritableStreamDefaultController, + ByteLengthQueuingStrategy, + CountQueuingStrategy, +}; diff --git a/src/node_buffer.cc b/src/node_buffer.cc index e816ba131644ad..b5651b5e325fc9 100644 --- a/src/node_buffer.cc +++ b/src/node_buffer.cc @@ -67,6 +67,7 @@ using v8::MaybeLocal; using v8::Nothing; using v8::Number; using v8::Object; +using v8::SharedArrayBuffer; using v8::String; using v8::Uint32; using v8::Uint32Array; @@ -1158,6 +1159,60 @@ void GetZeroFillToggle(const FunctionCallbackInfo& args) { args.GetReturnValue().Set(Uint32Array::New(ab, 0, 1)); } +void DetachArrayBuffer(const FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + if (args[0]->IsArrayBuffer()) { + Local buf = args[0].As(); + if (buf->IsDetachable()) { + std::shared_ptr store = buf->GetBackingStore(); + buf->Detach(); + args.GetReturnValue().Set(ArrayBuffer::New(env->isolate(), store)); + } + } +} + +void CopyArrayBuffer(const FunctionCallbackInfo& args) { + // args[0] == Destination ArrayBuffer + // args[1] == Destination ArrayBuffer Offset + // args[2] == Source ArrayBuffer + // args[3] == Source ArrayBuffer Offset + // args[4] == bytesToCopy + + CHECK(args[0]->IsArrayBuffer() || args[0]->IsSharedArrayBuffer()); + CHECK(args[1]->IsUint32()); + CHECK(args[2]->IsArrayBuffer() || args[2]->IsSharedArrayBuffer()); + CHECK(args[3]->IsUint32()); + CHECK(args[4]->IsUint32()); + + 
std::shared_ptr destination; + std::shared_ptr source; + + if (args[0]->IsArrayBuffer()) { + destination = args[0].As()->GetBackingStore(); + } else if (args[0]->IsSharedArrayBuffer()) { + destination = args[0].As()->GetBackingStore(); + } + + if (args[2]->IsArrayBuffer()) { + source = args[2].As()->GetBackingStore(); + } else if (args[0]->IsSharedArrayBuffer()) { + source = args[2].As()->GetBackingStore(); + } + + uint32_t destination_offset = args[1].As()->Value(); + uint32_t source_offset = args[3].As()->Value(); + size_t bytes_to_copy = args[4].As()->Value(); + + CHECK_GE(destination->ByteLength() - destination_offset, bytes_to_copy); + CHECK_GE(source->ByteLength() - source_offset, bytes_to_copy); + + uint8_t* dest = + static_cast(destination->Data()) + destination_offset; + uint8_t* src = + static_cast(source->Data()) + source_offset; + memcpy(dest, src, bytes_to_copy); +} + void Initialize(Local target, Local unused, Local context, @@ -1176,6 +1231,9 @@ void Initialize(Local target, env->SetMethodNoSideEffect(target, "indexOfNumber", IndexOfNumber); env->SetMethodNoSideEffect(target, "indexOfString", IndexOfString); + env->SetMethod(target, "detachArrayBuffer", DetachArrayBuffer); + env->SetMethod(target, "copyArrayBuffer", CopyArrayBuffer); + env->SetMethod(target, "swap16", Swap16); env->SetMethod(target, "swap32", Swap32); env->SetMethod(target, "swap64", Swap64); @@ -1251,6 +1309,9 @@ void RegisterExternalReferences(ExternalReferenceRegistry* registry) { registry->Register(StringWrite); registry->Register(GetZeroFillToggle); + registry->Register(DetachArrayBuffer); + registry->Register(CopyArrayBuffer); + Blob::RegisterExternalReferences(registry); FixedSizeBlobCopyJob::RegisterExternalReferences(registry); } diff --git a/test/fixtures/wpt/README.md b/test/fixtures/wpt/README.md index 86f2eaada97b3b..60d3aeb98c17da 100644 --- a/test/fixtures/wpt/README.md +++ b/test/fixtures/wpt/README.md @@ -21,7 +21,7 @@ Last update: - html/webappapis/timers: 
https://github.com/web-platform-tests/wpt/tree/5873f2d8f1/html/webappapis/timers - interfaces: https://github.com/web-platform-tests/wpt/tree/fcb671ed8b/interfaces - resources: https://github.com/web-platform-tests/wpt/tree/972ca5b669/resources -- streams: https://github.com/web-platform-tests/wpt/tree/b869e60df1/streams +- streams: https://github.com/web-platform-tests/wpt/tree/8f60d94439/streams - url: https://github.com/web-platform-tests/wpt/tree/1fcb39223d/url [Web Platform Tests]: https://github.com/web-platform-tests/wpt diff --git a/test/fixtures/wpt/streams/readable-byte-streams/general.any.js b/test/fixtures/wpt/streams/readable-byte-streams/general.any.js index db8ac3a39983fd..9aa508225865c8 100644 --- a/test/fixtures/wpt/streams/readable-byte-streams/general.any.js +++ b/test/fixtures/wpt/streams/readable-byte-streams/general.any.js @@ -957,7 +957,8 @@ promise_test(() => { assert_equals(view.byteOffset, 0, 'byteOffset'); assert_equals(view.byteLength, 2, 'byteLength'); - assert_equals(view[0], 0x0201); + const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength); + assert_equals(dataView.getUint16(0), 0x0102); return reader.read(new Uint8Array(1)); }).then(result => { @@ -1138,7 +1139,7 @@ promise_test(() => { assert_equals(pullCount, 1, '1 pull() should have been made in response to partial fill by enqueue()'); assert_not_equals(byobRequest, null, 'byobRequest should not be null'); - assert_equals(viewInfos[0].byteLength, 2, 'byteLength before enqueue() shouild be 2'); + assert_equals(viewInfos[0].byteLength, 2, 'byteLength before enqueue() should be 2'); assert_equals(viewInfos[1].byteLength, 1, 'byteLength after enqueue() should be 1'); reader.cancel(); @@ -1326,7 +1327,9 @@ promise_test(() => { const view = result.value; assert_equals(view.byteOffset, 0); assert_equals(view.byteLength, 2); - assert_equals(view[0], 0xaaff); + + const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength); + 
assert_equals(dataView.getUint16(0), 0xffaa); assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array'); assert_equals(viewInfo.bufferByteLength, 2, 'view.buffer.byteLength should be 2'); @@ -1381,7 +1384,9 @@ promise_test(() => { assert_equals(view.buffer.byteLength, 4, 'buffer.byteLength'); assert_equals(view.byteOffset, 0, 'byteOffset'); assert_equals(view.byteLength, 2, 'byteLength'); - assert_equals(view[0], 0x0001, 'Contents are set'); + + const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength); + assert_equals(dataView.getUint16(0), 0x0100, 'contents are set'); const p = reader.read(new Uint16Array(1)); @@ -1395,7 +1400,9 @@ promise_test(() => { assert_equals(view.buffer.byteLength, 2, 'buffer.byteLength'); assert_equals(view.byteOffset, 0, 'byteOffset'); assert_equals(view.byteLength, 2, 'byteLength'); - assert_equals(view[0], 0x0302, 'Contents are set'); + + const dataView = new DataView(view.buffer, view.byteOffset, view.byteLength); + assert_equals(dataView.getUint16(0), 0x0203, 'contents are set'); assert_not_equals(byobRequest, null, 'byobRequest must not be null'); assert_equals(viewInfo.constructor, Uint8Array, 'view.constructor should be Uint8Array'); diff --git a/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js b/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js index 2dcab69f42db0d..7c0bffb78710fe 100644 --- a/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js +++ b/test/fixtures/wpt/streams/readable-byte-streams/non-transferable-buffers.any.js @@ -25,7 +25,7 @@ test(t => { const memory = new WebAssembly.Memory({ initial: 1 }); const view = new Uint8Array(memory.buffer, 0, 1); - assert_throws_js(t, TypeError, controller.enqueue(view)); + assert_throws_js(TypeError, () => controller.enqueue(view)); }, 'ReadableStream with byte source: enqueue() with a non-transferable buffer'); promise_test(async t => { @@ 
-54,5 +54,5 @@ promise_test(async t => { ); await pullCalledPromise; - assert_throws_js(t, TypeError, byobRequest.respondWithNewView(newView)); + assert_throws_js(TypeError, () => byobRequest.respondWithNewView(newView)); }, 'ReadableStream with byte source: respondWithNewView() with a non-transferable buffer'); diff --git a/test/fixtures/wpt/streams/writable-streams/aborting.any.js b/test/fixtures/wpt/streams/writable-streams/aborting.any.js index 5c053bab915700..ab154a705ed0e9 100644 --- a/test/fixtures/wpt/streams/writable-streams/aborting.any.js +++ b/test/fixtures/wpt/streams/writable-streams/aborting.any.js @@ -1376,3 +1376,111 @@ promise_test(t => { return promise_rejects_js(t, TypeError, ws.abort(), 'abort should reject') .then(() => writer.ready); }, 'abort on a locked stream should reject'); + +test(t => { + let ctrl; + const ws = new WritableStream({start(c) { ctrl = c; }}); + const e = Error('hello'); + + assert_true(ctrl.signal instanceof AbortSignal); + assert_false(ctrl.signal.aborted); + assert_equals(ctrl.abortReason, undefined); + ws.abort(e); + assert_true(ctrl.signal.aborted); + assert_equals(ctrl.abortReason, e); +}, 'WritableStreamDefaultController.signal'); + +promise_test(async t => { + let ctrl; + let resolve; + const called = new Promise(r => resolve = r); + + const ws = new WritableStream({ + start(c) { ctrl = c; }, + write() { resolve(); return new Promise(() => {}); } + }); + const writer = ws.getWriter(); + + writer.write(99); + await called; + + assert_false(ctrl.signal.aborted); + assert_equals(ctrl.abortReason, undefined); + writer.abort(); + assert_true(ctrl.signal.aborted); + assert_equals(ctrl.abortReason, undefined); +}, 'the abort signal is signalled synchronously - write'); + +promise_test(async t => { + let ctrl; + let resolve; + const called = new Promise(r => resolve = r); + + const ws = new WritableStream({ + start(c) { ctrl = c; }, + close() { resolve(); return new Promise(() => {}); } + }); + const writer = 
ws.getWriter(); + + writer.close(99); + await called; + + assert_false(ctrl.signal.aborted); + writer.abort(); + assert_true(ctrl.signal.aborted); +}, 'the abort signal is signalled synchronously - close'); + +promise_test(async t => { + let ctrl; + const ws = new WritableStream({start(c) { ctrl = c; }}); + const writer = ws.getWriter(); + + const e = TypeError(); + ctrl.error(e); + await promise_rejects_exactly(t, e, writer.closed); + assert_false(ctrl.signal.aborted); +}, 'the abort signal is not signalled on error'); + +promise_test(async t => { + let ctrl; + const e = TypeError(); + const ws = new WritableStream({ + start(c) { ctrl = c; }, + async write() { throw e; } + }); + const writer = ws.getWriter(); + + await promise_rejects_exactly(t, e, writer.write('hello'), 'write result'); + await promise_rejects_exactly(t, e, writer.closed, 'closed'); + assert_false(ctrl.signal.aborted); +}, 'the abort signal is not signalled on write failure'); + +promise_test(async t => { + let ctrl; + const e = TypeError(); + const ws = new WritableStream({ + start(c) { ctrl = c; }, + async close() { throw e; } + }); + const writer = ws.getWriter(); + + await promise_rejects_exactly(t, e, writer.close(), 'close result'); + await promise_rejects_exactly(t, e, writer.closed, 'closed'); + assert_false(ctrl.signal.aborted); +}, 'the abort signal is not signalled on close failure'); + +promise_test(async t => { + let ctrl; + const e1 = SyntaxError(); + const e2 = TypeError(); + const ws = new WritableStream({ + start(c) { ctrl = c; }, + }); + + const writer = ws.getWriter(); + ctrl.signal.addEventListener('abort', () => writer.abort(e2)); + writer.abort(e1); + assert_true(ctrl.signal.aborted); + + await promise_rejects_exactly(t, e2, writer.closed, 'closed'); +}, 'recursive abort() call'); diff --git a/test/fixtures/wpt/versions.json b/test/fixtures/wpt/versions.json index 50316e8c583eb1..6567782a1d47c9 100644 --- a/test/fixtures/wpt/versions.json +++ 
b/test/fixtures/wpt/versions.json @@ -44,7 +44,7 @@ "path": "resources" }, "streams": { - "commit": "b869e60df1b8d3840e09b41c5e987c7e23f6856c", + "commit": "8f60d9443949c323522a2009518d54d5d6ab5541", "path": "streams" }, "url": { diff --git a/test/parallel/test-whatwg-readablebytestream.js b/test/parallel/test-whatwg-readablebytestream.js new file mode 100644 index 00000000000000..eb4355505053ef --- /dev/null +++ b/test/parallel/test-whatwg-readablebytestream.js @@ -0,0 +1,238 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +const { + ReadableStream, + ReadableByteStreamController, + ReadableStreamDefaultReader, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, +} = require('stream/web'); + +const { + kState, +} = require('internal/webstreams/util'); + +const { + open, +} = require('fs/promises'); + +const { + readFileSync, +} = require('fs'); + +const { + Buffer, +} = require('buffer'); + +const { + inspect, +} = require('util'); + +{ + const r = new ReadableStream({ + type: 'bytes', + }); + + assert(r[kState].controller instanceof ReadableByteStreamController); + + assert.strictEqual(typeof r.locked, 'boolean'); + assert.strictEqual(typeof r.cancel, 'function'); + assert.strictEqual(typeof r.getReader, 'function'); + assert.strictEqual(typeof r.pipeThrough, 'function'); + assert.strictEqual(typeof r.pipeTo, 'function'); + assert.strictEqual(typeof r.tee, 'function'); + + ['', null, 'asdf'].forEach((mode) => { + assert.throws(() => r.getReader({ mode }), { + code: 'ERR_INVALID_ARG_VALUE', + }); + }); + + [1, 'asdf'].forEach((options) => { + assert.throws(() => r.getReader(options), { + code: 'ERR_INVALID_ARG_TYPE', + }); + }); + + assert(!r.locked); + const defaultReader = r.getReader(); + assert(r.locked); + assert(defaultReader instanceof ReadableStreamDefaultReader); + defaultReader.releaseLock(); + const byobReader = r.getReader({ mode: 'byob' }); + 
assert(byobReader instanceof ReadableStreamBYOBReader); +} + +class Source { + constructor() { + this.controllerClosed = false; + } + + async start(controller) { + this.file = await open(__filename); + this.controller = controller; + } + + async pull(controller) { + const byobRequest = controller.byobRequest; + assert.match(inspect(byobRequest), /ReadableStreamBYOBRequest/); + + const view = byobRequest.view; + const { + bytesRead, + } = await this.file.read({ + buffer: view, + offset: view.byteOffset, + length: view.byteLength + }); + + if (bytesRead === 0) { + await this.file.close(); + this.controller.close(); + } + + assert.throws(() => byobRequest.respondWithNewView({}), { + code: 'ERR_INVALID_ARG_TYPE', + }); + + byobRequest.respond(bytesRead); + + assert.throws(() => byobRequest.respond(bytesRead), { + code: 'ERR_INVALID_STATE', + }); + assert.throws(() => byobRequest.respondWithNewView(view), { + code: 'ERR_INVALID_STATE', + }); + } + + get type() { return 'bytes'; } + + get autoAllocateChunkSize() { return 1024; } +} + +{ + const stream = new ReadableStream(new Source()); + assert(stream[kState].controller instanceof ReadableByteStreamController); + + async function read(stream) { + const reader = stream.getReader({ mode: 'byob' }); + + const chunks = []; + let result; + do { + result = await reader.read(Buffer.alloc(100)); + if (result.value !== undefined) + chunks.push(Buffer.from(result.value)); + } while (!result.done); + + return Buffer.concat(chunks); + } + + read(stream).then(common.mustCall((data) => { + const check = readFileSync(__filename); + assert.deepStrictEqual(check, data); + })); +} + +{ + const stream = new ReadableStream(new Source()); + assert(stream[kState].controller instanceof ReadableByteStreamController); + + async function read(stream) { + const chunks = []; + for await (const chunk of stream) + chunks.push(chunk); + + return Buffer.concat(chunks); + } + + read(stream).then(common.mustCall((data) => { + const check = 
readFileSync(__filename); + assert.deepStrictEqual(check, data); + })); +} + +{ + const stream = new ReadableStream(new Source()); + assert(stream[kState].controller instanceof ReadableByteStreamController); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream) + break; + } + + read(stream).then(common.mustCall()); +} + +{ + const stream = new ReadableStream(new Source()); + assert(stream[kState].controller instanceof ReadableByteStreamController); + + const error = new Error('boom'); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream) + throw error; + } + + assert.rejects(read(stream), error); +} + +{ + assert.throws(() => { + Reflect.get(ReadableStreamBYOBRequest.prototype, 'view', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => ReadableStreamBYOBRequest.prototype.respond.call({}), { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + ReadableStreamBYOBRequest.prototype.respondWithNewView.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); +} + +{ + const readable = new ReadableStream({ type: 'bytes' }); + const reader = readable.getReader({ mode: 'byob' }); + reader.releaseLock(); + reader.releaseLock(); + assert.rejects(reader.read(new Uint8Array(10)), { + code: 'ERR_INVALID_STATE', + }); + assert.rejects(reader.cancel(), { + code: 'ERR_INVALID_STATE', + }); +} + +{ + let controller; + new ReadableStream({ + type: 'bytes', + start(c) { controller = c; } + }); + assert.throws(() => controller.enqueue(1), { + code: 'ERR_INVALID_ARG_TYPE', + }); + controller.close(); + assert.throws(() => controller.enqueue(new Uint8Array(10)), { + code: 'ERR_INVALID_STATE', + }); + assert.throws(() => controller.close(), { + code: 'ERR_INVALID_STATE', + }); +} + +{ + +} diff --git a/test/parallel/test-whatwg-readablestream.js b/test/parallel/test-whatwg-readablestream.js new file mode 100644 index 00000000000000..1c18efeec41963 --- 
/dev/null +++ b/test/parallel/test-whatwg-readablestream.js @@ -0,0 +1,1522 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { + isPromise, +} = require('util/types'); +const { + setImmediate: delay +} = require('timers/promises'); + +const { + ByteLengthQueuingStrategy, + CountQueuingStrategy, + ReadableStream, + ReadableStreamDefaultReader, + ReadableStreamDefaultController, + ReadableByteStreamController, + ReadableStreamBYOBReader, + ReadableStreamBYOBRequest, + WritableStream, +} = require('stream/web'); + +const { + readableStreamPipeTo, + readableStreamTee, + readableByteStreamControllerConvertPullIntoDescriptor, + readableStreamDefaultControllerEnqueue, + readableByteStreamControllerEnqueue, + readableStreamDefaultControllerCanCloseOrEnqueue, + readableByteStreamControllerClose, + readableByteStreamControllerRespond, +} = require('internal/webstreams/readablestream'); + +const { + kState +} = require('internal/webstreams/util'); + +const { + createReadStream, + readFileSync, +} = require('fs'); +const { + Buffer, +} = require('buffer'); + +const { + kTransfer, +} = require('internal/worker/js_transferable'); + +const { + inspect, +} = require('util'); + +{ + const r = new ReadableStream(); + assert.strictEqual(typeof r.locked, 'boolean'); + assert.strictEqual(typeof r.cancel, 'function'); + assert.strictEqual(typeof r.getReader, 'function'); + assert.strictEqual(typeof r.pipeThrough, 'function'); + assert.strictEqual(typeof r.pipeTo, 'function'); + assert.strictEqual(typeof r.tee, 'function'); + + ['', null, 'asdf'].forEach((mode) => { + assert.throws(() => r.getReader({ mode }), { + code: 'ERR_INVALID_ARG_VALUE', + }); + }); + + [1, 'asdf'].forEach((options) => { + assert.throws(() => r.getReader(options), { + code: 'ERR_INVALID_ARG_TYPE', + }); + }); + + assert(!r.locked); + r.getReader(); + assert(r.locked); +} + +{ + const source = { + start: 
common.mustCall((controller) => { + assert(controller instanceof ReadableStreamDefaultController); + }), + pull: common.mustCall((controller) => { + assert(controller instanceof ReadableStreamDefaultController); + }), + cancel: common.mustNotCall(), + }; + + new ReadableStream(source); +} + +{ + const source = { + start: common.mustCall(async (controller) => { + assert(controller instanceof ReadableStreamDefaultController); + }), + pull: common.mustCall(async (controller) => { + assert(controller instanceof ReadableStreamDefaultController); + }), + cancel: common.mustNotCall(), + }; + + new ReadableStream(source); +} + +{ + const source = { + start: common.mustCall((controller) => { + assert(controller instanceof ReadableByteStreamController); + }), + pull: common.mustNotCall(), + cancel: common.mustNotCall(), + type: 'bytes', + }; + + new ReadableStream(source); +} + +{ + const source = { + start: common.mustCall(async (controller) => { + assert(controller instanceof ReadableByteStreamController); + }), + pull: common.mustNotCall(), + cancel: common.mustNotCall(), + type: 'bytes', + }; + + new ReadableStream(source); +} + +{ + const source = { + start: common.mustCall(async (controller) => { + assert(controller instanceof ReadableByteStreamController); + }), + pull: common.mustCall(async (controller) => { + assert(controller instanceof ReadableByteStreamController); + }), + cancel: common.mustNotCall(), + type: 'bytes', + }; + + new ReadableStream(source, { highWaterMark: 10 }); +} + +{ + // These are silly but they should all work per spec + new ReadableStream(1); + new ReadableStream('hello'); + new ReadableStream(false); + new ReadableStream([]); + new ReadableStream(1, 1); + new ReadableStream(1, 'hello'); + new ReadableStream(1, false); + new ReadableStream(1, []); +} + +['a', {}, false].forEach((size) => { + assert.throws(() => { + new ReadableStream({}, { size }); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); +}); + +['a', {}].forEach((highWaterMark) => { + 
assert.throws(() => { + new ReadableStream({}, { highWaterMark }); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); + + assert.throws(() => { + new ReadableStream({ type: 'bytes' }, { highWaterMark }); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); +}); + +[-1, NaN].forEach((highWaterMark) => { + assert.throws(() => { + new ReadableStream({}, { highWaterMark }); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); + + assert.throws(() => { + new ReadableStream({ type: 'bytes' }, { highWaterMark }); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); +}); + +{ + new ReadableStream({}, new ByteLengthQueuingStrategy({ highWaterMark: 1 })); + new ReadableStream({}, new CountQueuingStrategy({ highWaterMark: 1 })); +} + +{ + const strategy = new ByteLengthQueuingStrategy({ highWaterMark: 1 }); + assert.strictEqual(strategy.highWaterMark, 1); + assert.strictEqual(strategy.size(new ArrayBuffer(10)), 10); + + const { size } = strategy; + assert.strictEqual(size(new ArrayBuffer(10)), 10); +} + +{ + const strategy = new CountQueuingStrategy({ highWaterMark: 1 }); + assert.strictEqual(strategy.highWaterMark, 1); + assert.strictEqual(strategy.size(new ArrayBuffer(10)), 1); + + const { size } = strategy; + assert.strictEqual(size(new ArrayBuffer(10)), 1); +} + +{ + const r = new ReadableStream({ + async start() { + throw new Error('boom'); + } + }); + + setImmediate(() => { + assert.strictEqual(r[kState].state, 'errored'); + assert.match(r[kState].storedError?.message, /boom/); + }); +} + +{ + const data = Buffer.from('hello'); + const r = new ReadableStream({ + start(controller) { + controller.enqueue(data); + controller.close(); + }, + }); + + (async function read() { + const reader = r.getReader(); + let res = await reader.read(); + if (res.done) return; + const buf = Buffer.from(res.value); + assert.strictEqual(buf.toString(), data.toString()); + res = await reader.read(); + assert(res.done); + })().then(common.mustCall()); +} + +{ + const r = new ReadableStream({ + start(controller) { + 
controller.close(); + }, + }); + + (async function read() { + const reader = r.getReader(); + const res = await reader.read(); + assert(res.done); + })().then(common.mustCall()); +} + +assert.throws(() => { + new ReadableStream({ + get start() { throw new Error('boom1'); } + }, { + get size() { throw new Error('boom2'); } + }); +}, /boom2/); + +{ + const stream = new ReadableStream(); + const reader = stream.getReader(); + + assert(stream.locked); + assert.strictEqual(reader[kState].stream, stream); + assert.strictEqual(stream[kState].reader, reader); + + assert.throws(() => stream.getReader(), { + code: 'ERR_INVALID_STATE', + }); + + assert(reader instanceof ReadableStreamDefaultReader); + + assert(isPromise(reader.closed)); + assert.strictEqual(typeof reader.cancel, 'function'); + assert.strictEqual(typeof reader.read, 'function'); + assert.strictEqual(typeof reader.releaseLock, 'function'); + + const read1 = reader.read(); + const read2 = reader.read(); + + // The stream is empty so the read will never settle. + read1.then( + common.mustNotCall(), + common.mustNotCall() + ); + + // The stream is empty so the read will never settle. 
+ read2.then( + common.mustNotCall(), + common.mustNotCall() + ); + + assert.notStrictEqual(read1, read2); + + assert.strictEqual(reader[kState].readRequests.length, 2); + + delay().then(common.mustCall()); + + assert.throws(() => reader.releaseLock(), { + code: 'ERR_INVALID_STATE', + }); + assert(stream.locked); +} + +{ + const stream = new ReadableStream(); + const reader = stream.getReader(); + const closedBefore = reader.closed; + assert(stream.locked); + reader.releaseLock(); + assert(!stream.locked); + const closedAfter = reader.closed; + + assert.strictEqual(closedBefore, closedAfter); + + assert.rejects(reader.read(), { + code: 'ERR_INVALID_STATE', + }); + + assert.rejects(closedBefore, { + code: 'ERR_INVALID_STATE', + }); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(Buffer.from('hello')); + } + }); + + const reader = stream.getReader(); + + assert.rejects(stream.cancel(), { + code: 'ERR_INVALID_STATE', + }); + + reader.cancel(); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + assert(!stream.locked); + + const cancel1 = stream.cancel(); + const cancel2 = stream.cancel(); + + assert.notStrictEqual(cancel1, cancel2); + + Promise.all([cancel1, cancel2]).then(common.mustCall((res) => { + assert.deepStrictEqual(res, [undefined, undefined]); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + + stream.getReader().releaseLock(); + stream.getReader().releaseLock(); + stream.getReader(); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + } + }); + + stream.getReader(); + + assert.throws(() => stream.getReader(), { + code: 'ERR_INVALID_STATE', + }); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + }, + }); + + 
const reader = stream.getReader(); + + reader.closed.then(common.mustCall()); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + }, + }); + + const reader = stream.getReader(); + + const closedBefore = reader.closed; + reader.releaseLock(); + const closedAfter = reader.closed; + assert.notStrictEqual(closedBefore, closedAfter); + + closedBefore.then(common.mustCall()); + assert.rejects(closedAfter, { + code: 'ERR_INVALID_STATE', + }); +} + +{ + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + }, + }); + + const reader = stream.getReader(); + c.close(); + + const closedBefore = reader.closed; + reader.releaseLock(); + const closedAfter = reader.closed; + assert.notStrictEqual(closedBefore, closedAfter); + + closedBefore.then(common.mustCall()); + assert.rejects(closedAfter, { + code: 'ERR_INVALID_STATE', + }); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.close(); + }, + }); + + const reader = stream.getReader(); + + const cancel1 = reader.cancel(); + const cancel2 = reader.cancel(); + const closed = reader.closed; + + assert.notStrictEqual(cancel1, cancel2); + assert.notStrictEqual(cancel1, closed); + assert.notStrictEqual(cancel2, closed); + + Promise.all([cancel1, cancel2]).then(common.mustCall((res) => { + assert.deepStrictEqual(res, [undefined, undefined]); + })); +} + +{ + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + }, + }); + + const reader = stream.getReader(); + c.close(); + + const cancel1 = reader.cancel(); + const cancel2 = reader.cancel(); + const closed = reader.closed; + + assert.notStrictEqual(cancel1, cancel2); + assert.notStrictEqual(cancel1, closed); 
+ assert.notStrictEqual(cancel2, closed); + + Promise.all([cancel1, cancel2]).then(common.mustCall((res) => { + assert.deepStrictEqual(res, [undefined, undefined]); + })); +} + +{ + const stream = new ReadableStream(); + const cancel1 = stream.cancel(); + const cancel2 = stream.cancel(); + assert.notStrictEqual(cancel1, cancel2); + + Promise.all([cancel1, cancel2]).then(common.mustCall((res) => { + assert.deepStrictEqual(res, [undefined, undefined]); + })); + + stream.getReader().read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + start(controller) { + controller.error(error); + } + }); + stream.getReader().releaseLock(); + const reader = stream.getReader(); + assert.rejects(reader.closed, error); + assert.rejects(reader.read(), error); + assert.rejects(reader.read(), error); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + start(controller) { + controller.error(error); + } + }); + const reader = stream.getReader(); + const cancel1 = reader.cancel(); + const cancel2 = reader.cancel(); + assert.notStrictEqual(cancel1, cancel2); + assert.rejects(cancel1, error); + assert.rejects(cancel2, error); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + async start(controller) { + throw error; + } + }); + stream.getReader().releaseLock(); + const reader = stream.getReader(); + assert.rejects(reader.closed, error); + assert.rejects(reader.read(), error); + assert.rejects(reader.read(), error); +} + +{ + const buf1 = Buffer.from('hello'); + const buf2 = Buffer.from('there'); + let doClose; + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(buf1); + controller.enqueue(buf2); + doClose = controller.close.bind(controller); + } + }); + const reader = stream.getReader(); + doClose(); + reader.read().then(common.mustCall(({ value, done }) => { + 
assert.deepStrictEqual(value, buf1); + assert(!done); + reader.read().then(common.mustCall(({ value, done }) => { + assert.deepStrictEqual(value, buf2); + assert(!done); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); + })); + })); +} + +{ + const buf1 = Buffer.from('hello'); + const buf2 = Buffer.from('there'); + const stream = new ReadableStream({ + start(controller) { + controller.enqueue(buf1); + controller.enqueue(buf2); + } + }); + const reader = stream.getReader(); + reader.read().then(common.mustCall(({ value, done }) => { + assert.deepStrictEqual(value, buf1); + assert(!done); + reader.read().then(common.mustCall(({ value, done }) => { + assert.deepStrictEqual(value, buf2); + assert(!done); + reader.read().then(common.mustNotCall()); + delay().then(common.mustCall()); + })); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.close(); + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + assert(s1 instanceof ReadableStream); + assert(s2 instanceof ReadableStream); + + async function read(stream) { + const reader = stream.getReader(); + assert.deepStrictEqual( + await reader.read(), { value: 'a', done: false }); + assert.deepStrictEqual( + await reader.read(), { value: 'b', done: false }); + assert.deepStrictEqual( + await reader.read(), { value: undefined, done: true }); + } + + Promise.all([ + read(s1), + read(s2), + ]).then(common.mustCall()); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + }, + pull() { throw error; } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + assert(stream.locked); + + assert(s1 instanceof ReadableStream); + assert(s2 instanceof ReadableStream); + + const reader1 = s1.getReader(); + const reader2 = s2.getReader(); + + const closed1 = reader1.closed; + 
const closed2 = reader2.closed; + + assert.notStrictEqual(closed1, closed2); + + assert.rejects(closed1, error); + assert.rejects(closed2, error); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.close(); + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + assert(s1 instanceof ReadableStream); + assert(s2 instanceof ReadableStream); + + s2.cancel(); + + async function read(stream, canceled = false) { + const reader = stream.getReader(); + if (!canceled) { + assert.deepStrictEqual( + await reader.read(), { value: 'a', done: false }); + assert.deepStrictEqual( + await reader.read(), { value: 'b', done: false }); + } + assert.deepStrictEqual( + await reader.read(), { value: undefined, done: true }); + } + + Promise.all([ + read(s1), + read(s2, true), + ]).then(common.mustCall()); +} + +{ + const error1 = new Error('boom1'); + const error2 = new Error('boom2'); + + const stream = new ReadableStream({ + cancel(reason) { + assert.deepStrictEqual(reason, [error1, error2]); + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + s1.cancel(error1); + s2.cancel(error2); +} + +{ + const error1 = new Error('boom1'); + const error2 = new Error('boom2'); + + const stream = new ReadableStream({ + cancel(reason) { + assert.deepStrictEqual(reason, [error1, error2]); + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + s2.cancel(error2); + s1.cancel(error1); +} + +{ + const error = new Error('boom1'); + + const stream = new ReadableStream({ + cancel() { + throw error; + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + assert.rejects(s1.cancel(), error); + assert.rejects(s2.cancel(), error); +} + +{ + const error = new Error('boom1'); + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + c.error(error); + + assert.rejects(s1.cancel(), error); + assert.rejects(s2.cancel(), error); +} + +{ + const error 
= new Error('boom1'); + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + } + }); + + const { 0: s1, 1: s2 } = stream.tee(); + + const reader1 = s1.getReader(); + const reader2 = s2.getReader(); + + assert.rejects(reader1.closed, error); + assert.rejects(reader2.closed, error); + + assert.rejects(reader1.read(), error); + assert.rejects(reader2.read(), error); + + setImmediate(() => c.error(error)); +} + +{ + let pullCount = 0; + const stream = new ReadableStream({ + pull(controller) { + if (pullCount) + controller.enqueue(pullCount); + pullCount++; + }, + }); + + const reader = stream.getReader(); + + queueMicrotask(common.mustCall(() => { + assert.strictEqual(pullCount, 1); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 1); + assert(!done); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 2); + assert(!done); + })); + + })); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + }, + pull: common.mustCall(), + }); + + stream.getReader().read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'a'); + assert(!done); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + }, + pull: common.mustCall(), + }); + + const reader = stream.getReader(); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'a'); + assert(!done); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'b'); + assert(!done); + })); + })); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.close(); + }, + pull: common.mustNotCall(), + }); + + const reader = stream.getReader(); + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'a'); + 
assert(!done); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, 'b'); + assert(!done); + + reader.read().then(common.mustCall(({ value, done }) => { + assert.strictEqual(value, undefined); + assert(done); + })); + + })); + })); +} + +{ + let res; + let promise; + let calls = 0; + const stream = new ReadableStream({ + pull(controller) { + controller.enqueue(++calls); + promise = new Promise((resolve) => res = resolve); + return promise; + } + }); + + const reader = stream.getReader(); + + (async () => { + await reader.read(); + assert.strictEqual(calls, 1); + await delay(); + assert.strictEqual(calls, 1); + res(); + await delay(); + assert.strictEqual(calls, 2); + })().then(common.mustCall()); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + }, + pull: common.mustCall(4), + }, { + highWaterMark: Infinity, + size() { return 1; } + }); + + const reader = stream.getReader(); + (async () => { + await delay(); + await reader.read(); + await reader.read(); + await reader.read(); + })().then(common.mustCall()); +} + +{ + const stream = new ReadableStream({ + start(controller) { + controller.enqueue('a'); + controller.enqueue('b'); + controller.enqueue('c'); + controller.close(); + }, + pull: common.mustNotCall(), + }, { + highWaterMark: Infinity, + size() { return 1; } + }); + + const reader = stream.getReader(); + (async () => { + await delay(); + await reader.read(); + await reader.read(); + await reader.read(); + })().then(common.mustCall()); +} + +{ + let calls = 0; + let res; + const ready = new Promise((resolve) => res = resolve); + + new ReadableStream({ + pull(controller) { + controller.enqueue(++calls); + if (calls === 4) + res(); + } + }, { + size() { return 1; }, + highWaterMark: 4 + }); + + ready.then(common.mustCall(() => { + assert.strictEqual(calls, 4); + })); +} + +{ + const stream = new ReadableStream({ + pull: 
common.mustCall((controller) => controller.close()) + }); + + const reader = stream.getReader(); + + reader.closed.then(common.mustCall()); +} + +{ + const error = new Error('boom'); + const stream = new ReadableStream({ + pull: common.mustCall((controller) => controller.error(error)) + }); + + const reader = stream.getReader(); + + assert.rejects(reader.closed, error); +} + +{ + const error = new Error('boom'); + const error2 = new Error('boom2'); + const stream = new ReadableStream({ + pull: common.mustCall((controller) => { + controller.error(error); + throw error2; + }) + }); + + const reader = stream.getReader(); + + assert.rejects(reader.closed, error); +} + +{ + let startCalled = false; + new ReadableStream({ + start: common.mustCall((controller) => { + controller.enqueue('a'); + controller.close(); + assert.throws(() => controller.enqueue('b'), { + code: 'ERR_INVALID_STATE' + }); + startCalled = true; + }) + }); + assert(startCalled); +} + +{ + let startCalled = false; + new ReadableStream({ + start: common.mustCall((controller) => { + controller.close(); + assert.throws(() => controller.enqueue('b'), { + code: 'ERR_INVALID_STATE' + }); + startCalled = true; + }) + }); + assert(startCalled); +} + +{ + class Source { + startCalled = false; + pullCalled = false; + cancelCalled = false; + + start(controller) { + assert.strictEqual(this, source); + this.startCalled = true; + controller.enqueue('a'); + } + + pull() { + assert.strictEqual(this, source); + this.pullCalled = true; + } + + cancel() { + assert.strictEqual(this, source); + this.cancelCalled = true; + } + } + + const source = new Source(); + + const stream = new ReadableStream(source); + const reader = stream.getReader(); + + (async () => { + await reader.read(); + reader.releaseLock(); + stream.cancel(); + assert(source.startCalled); + assert(source.pullCalled); + assert(source.cancelCalled); + })().then(common.mustCall()); +} + +{ + let startCalled = false; + new ReadableStream({ + start(controller) 
{ + assert.strictEqual(controller.desiredSize, 10); + controller.close(); + assert.strictEqual(controller.desiredSize, 0); + startCalled = true; + } + }, { + highWaterMark: 10 + }); + assert(startCalled); +} + +{ + let startCalled = false; + new ReadableStream({ + start(controller) { + assert.strictEqual(controller.desiredSize, 10); + controller.error(); + assert.strictEqual(controller.desiredSize, null); + startCalled = true; + } + }, { + highWaterMark: 10 + }); + assert(startCalled); +} + +{ + class Foo extends ReadableStream {} + const foo = new Foo(); + foo.getReader(); +} + +{ + let startCalled = false; + new ReadableStream({ + start(controller) { + assert.strictEqual(controller.desiredSize, 1); + controller.enqueue('a'); + assert.strictEqual(controller.desiredSize, 0); + controller.enqueue('a'); + assert.strictEqual(controller.desiredSize, -1); + controller.enqueue('a'); + assert.strictEqual(controller.desiredSize, -2); + controller.enqueue('a'); + assert.strictEqual(controller.desiredSize, -3); + startCalled = true; + } + }); + assert(startCalled); +} + +{ + let c; + const stream = new ReadableStream({ + start(controller) { + c = controller; + } + }); + + const reader = stream.getReader(); + + (async () => { + assert.strictEqual(c.desiredSize, 1); + c.enqueue(1); + assert.strictEqual(c.desiredSize, 0); + await reader.read(); + assert.strictEqual(c.desiredSize, 1); + c.enqueue(1); + c.enqueue(1); + assert.strictEqual(c.desiredSize, -1); + await reader.read(); + assert.strictEqual(c.desiredSize, 0); + await reader.read(); + assert.strictEqual(c.desiredSize, 1); + })().then(common.mustCall()); +} + +{ + let c; + new ReadableStream({ + start(controller) { + c = controller; + } + }); + assert(c instanceof ReadableStreamDefaultController); + assert.strictEqual(typeof c.desiredSize, 'number'); + assert.strictEqual(typeof c.enqueue, 'function'); + assert.strictEqual(typeof c.close, 'function'); + assert.strictEqual(typeof c.error, 'function'); +} + +class Source { + 
constructor() { + this.cancelCalled = false; + } + + start(controller) { + this.stream = createReadStream(__filename); + this.stream.on('data', (chunk) => { + controller.enqueue(chunk); + }); + this.stream.once('end', () => { + if (!this.cancelCalled) + controller.close(); + }); + this.stream.once('error', (error) => { + controller.error(error); + }); + } + + cancel() { + this.cancelCalled = true; + } +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + async function read(stream) { + const reader = stream.getReader(); + const chunks = []; + let read = await reader.read(); + while (!read.done) { + chunks.push(Buffer.from(read.value)); + read = await reader.read(); + } + return Buffer.concat(chunks); + } + + read(stream).then(common.mustCall((data) => { + const check = readFileSync(__filename); + assert.deepStrictEqual(data, check); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + async function read(stream) { + const chunks = []; + for await (const chunk of stream) + chunks.push(chunk); + return Buffer.concat(chunks); + } + + read(stream).then(common.mustCall((data) => { + const check = readFileSync(__filename); + assert.deepStrictEqual(data, check); + + assert.strictEqual(stream[kState].state, 'closed'); + assert(!stream.locked); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + [1, false, ''].forEach((options) => { + assert.throws(() => stream.values(options), { + code: 'ERR_INVALID_ARG_TYPE', + }); + }); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream.values({ preventCancel: true })) + return; + } + + read(stream).then(common.mustCall((data) => { + assert.strictEqual(stream[kState].state, 'readable'); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for 
await (const _ of stream.values({ preventCancel: false })) + return; + } + + read(stream).then(common.mustCall((data) => { + assert.strictEqual(stream[kState].state, 'closed'); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + const error = new Error('boom'); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream.values({ preventCancel: true })) + throw error; + } + + assert.rejects(read(stream), error).then(common.mustCall(() => { + assert.strictEqual(stream[kState].state, 'readable'); + })); +} + +{ + const source = new Source(); + const stream = new ReadableStream(source); + + const error = new Error('boom'); + + async function read(stream) { + // eslint-disable-next-line no-unused-vars + for await (const _ of stream.values({ preventCancel: false })) + throw error; + } + + assert.rejects(read(stream), error).then(common.mustCall(() => { + assert.strictEqual(stream[kState].state, 'closed'); + })); +} + +{ + assert.throws(() => Reflect.get(ReadableStream.prototype, 'locked', {}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStream.prototype.cancel.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => ReadableStream.prototype.getReader.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => ReadableStream.prototype.tee.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => ReadableStream.prototype[kTransfer].call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStreamDefaultReader.prototype.read.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStreamDefaultReader.prototype.cancel.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => { + return Reflect.get(ReadableStreamDefaultReader.prototype, 'closed'); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableStreamDefaultReader.prototype.releaseLock.call({}); + }, { + 
code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStreamBYOBReader.prototype.read.call({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableStreamBYOBReader.prototype.releaseLock.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => { + return Reflect.get(ReadableStreamBYOBReader.prototype, 'closed'); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => ReadableStreamBYOBReader.prototype.cancel.call({}), { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + Reflect.get(ReadableByteStreamController.prototype, 'byobRequest', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + Reflect.get(ReadableByteStreamController.prototype, 'desiredSize', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableByteStreamController.prototype.close.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableByteStreamController.prototype.enqueue.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + ReadableByteStreamController.prototype.error.call({}); + }, { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => new ReadableStreamBYOBRequest(), { + code: 'ERR_ILLEGAL_CONSTRUCTOR', + }); + + assert.throws(() => new ReadableStreamDefaultController(), { + code: 'ERR_ILLEGAL_CONSTRUCTOR', + }); + + assert.throws(() => new ReadableByteStreamController(), { + code: 'ERR_ILLEGAL_CONSTRUCTOR', + }); +} + +{ + let controller; + const readable = new ReadableStream({ + start(c) { controller = c; } + }); + + assert.strictEqual( + inspect(readable), + 'ReadableStream { locked: false, state: \'readable\' }'); + assert.strictEqual( + inspect(readable, { depth: null }), + 'ReadableStream { locked: false, state: \'readable\' }'); + assert.strictEqual( + inspect(readable, { depth: 0 }), + 'ReadableStream [Object]'); + + assert.strictEqual( + inspect(controller), + 'ReadableStreamDefaultController {}'); + assert.strictEqual( + 
inspect(controller, { depth: null }), + 'ReadableStreamDefaultController {}'); + assert.strictEqual( + inspect(controller, { depth: 0 }), + 'ReadableStreamDefaultController {}'); + + const reader = readable.getReader(); + + assert.match( + inspect(reader), + /ReadableStreamDefaultReader/); + assert.match( + inspect(reader, { depth: null }), + /ReadableStreamDefaultReader/); + assert.match( + inspect(reader, { depth: 0 }), + /ReadableStreamDefaultReader/); + + assert.rejects(readableStreamPipeTo(1), { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.rejects(readableStreamPipeTo(new ReadableStream(), 1), { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.rejects( + readableStreamPipeTo( + new ReadableStream(), + new WritableStream(), + false, + false, + false, + {}), + { + code: 'ERR_INVALID_ARG_TYPE', + }); +} + +{ + const readable = new ReadableStream(); + const reader = readable.getReader(); + reader.releaseLock(); + reader.releaseLock(); + assert.rejects(reader.read(), { + code: 'ERR_INVALID_STATE', + }); + assert.rejects(reader.cancel(), { + code: 'ERR_INVALID_STATE', + }); +} + +{ + // Test tee() cloneForBranch2 argument + const readable = new ReadableStream({ + start(controller) { + controller.enqueue('hello'); + } + }); + const [r1, r2] = readableStreamTee(readable, true); + r1.getReader().read().then( + common.mustCall(({ value }) => assert.strictEqual(value, 'hello'))); + r2.getReader().read().then( + common.mustCall(({ value }) => assert.strictEqual(value, 'hello'))); +} + +{ + assert.throws(() => { + readableByteStreamControllerConvertPullIntoDescriptor({ + bytesFilled: 10, + byteLength: 5 + }); + }, { + code: 'ERR_INVALID_STATE', + }); +} + +{ + let controller; + const readable = new ReadableStream({ + start(c) { controller = c; } + }); + + controller[kState].pendingPullIntos = [{}]; + assert.throws(() => readableByteStreamControllerRespond(controller, 0), { + code: 'ERR_INVALID_ARG_VALUE', + }); + + readable.cancel().then(common.mustCall()); + + 
assert.throws(() => readableByteStreamControllerRespond(controller, 1), { + code: 'ERR_INVALID_ARG_VALUE', + }); + + assert(!readableStreamDefaultControllerCanCloseOrEnqueue(controller)); + readableStreamDefaultControllerEnqueue(controller); + readableByteStreamControllerClose(controller); + readableByteStreamControllerEnqueue(controller); +} diff --git a/test/parallel/test-whatwg-transformstream.js b/test/parallel/test-whatwg-transformstream.js new file mode 100644 index 00000000000000..0cbc76cc4ce8c0 --- /dev/null +++ b/test/parallel/test-whatwg-transformstream.js @@ -0,0 +1,188 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +const { + ReadableStream, + TransformStream, + TransformStreamDefaultController, +} = require('stream/web'); + +const { + createReadStream, + readFileSync, +} = require('fs'); + +const { + kTransfer, +} = require('internal/worker/js_transferable'); + +const { + inspect, +} = require('util'); + +assert.throws(() => new TransformStream({ readableType: 1 }), { + code: 'ERR_INVALID_ARG_VALUE', +}); +assert.throws(() => new TransformStream({ writableType: 1 }), { + code: 'ERR_INVALID_ARG_VALUE', +}); + + +{ + const stream = new TransformStream(); + + async function test(stream) { + const writer = stream.writable.getWriter(); + const reader = stream.readable.getReader(); + + const { 1: result } = await Promise.all([ + writer.write('hello'), + reader.read(), + ]); + + assert.strictEqual(result.value, 'hello'); + } + + test(stream).then(common.mustCall()); +} + +class Transform { + start(controller) { + this.started = true; + } + + async transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } + + async flush() { + this.flushed = true; + } +} + +{ + const transform = new Transform(); + const stream = new TransformStream(transform); + assert(transform.started); + + async function test(stream) { + const writer = stream.writable.getWriter(); 
+ const reader = stream.readable.getReader(); + + const { 1: result } = await Promise.all([ + writer.write('hello'), + reader.read(), + ]); + + assert.strictEqual(result.value, 'HELLO'); + + await writer.close(); + } + + test(stream).then(common.mustCall(() => { + assert(transform.flushed); + })); +} + +class Source { + constructor() { + this.cancelCalled = false; + } + + start(controller) { + this.stream = createReadStream(__filename); + this.stream.on('data', (chunk) => { + controller.enqueue(chunk.toString()); + }); + this.stream.once('end', () => { + if (!this.cancelCalled) + controller.close(); + }); + this.stream.once('error', (error) => { + controller.error(error); + }); + } + + cancel() { + this.cancelCalled = true; + } +} + +{ + const instream = new ReadableStream(new Source()); + const tstream = new TransformStream(new Transform()); + const r = instream.pipeThrough(tstream); + + async function read(stream) { + let res = ''; + for await (const chunk of stream) + res += chunk; + return res; + } + + read(r).then(common.mustCall((data) => { + const check = readFileSync(__filename); + assert.strictEqual(check.toString().toUpperCase(), data); + })); +} + +{ + assert.throws(() => Reflect.get(TransformStream.prototype, 'readable', {}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => Reflect.get(TransformStream.prototype, 'writable', {}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => TransformStream.prototype[kTransfer]({}), { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + Reflect.get(TransformStreamDefaultController.prototype, 'desiredSize', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + TransformStreamDefaultController.prototype.enqueue({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + TransformStreamDefaultController.prototype.error({}); + }, { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => { + TransformStreamDefaultController.prototype.terminate({}); + }, { + code: 
'ERR_INVALID_THIS', + }); + + assert.throws(() => new TransformStreamDefaultController(), { + code: 'ERR_ILLEGAL_CONSTRUCTOR', + }); +} + +{ + let controller; + const transform = new TransformStream({ + start(c) { + controller = c; + } + }); + + assert.match(inspect(transform), /TransformStream/); + assert.match(inspect(transform, { depth: null }), /TransformStream/); + assert.match(inspect(transform, { depth: 0 }), /TransformStream \[/); + + assert.match(inspect(controller), /TransformStreamDefaultController/); + assert.match( + inspect(controller, { depth: null }), + /TransformStreamDefaultController/); + assert.match( + inspect(controller, { depth: 0 }), + /TransformStreamDefaultController \[/); +} diff --git a/test/parallel/test-whatwg-webstreams-coverage.js b/test/parallel/test-whatwg-webstreams-coverage.js new file mode 100644 index 00000000000000..f0036723b05977 --- /dev/null +++ b/test/parallel/test-whatwg-webstreams-coverage.js @@ -0,0 +1,70 @@ +// Flags: --no-warnings --expose-internals +'use strict'; + +require('../common'); + +const { + ByteLengthQueuingStrategy, + CountQueuingStrategy, +} = require('stream/web'); + +const { + inspect, +} = require('util'); + +const { + isPromisePending, +} = require('internal/webstreams/util'); + +const assert = require('assert'); + +assert(!isPromisePending({})); +assert(!isPromisePending(Promise.resolve())); +assert(isPromisePending(new Promise(() => {}))); + +// Brand checking works +assert.throws(() => { + Reflect.get(ByteLengthQueuingStrategy.prototype, 'highWaterMark', {}); +}, { + code: 'ERR_INVALID_THIS' +}); + +assert.throws(() => { + Reflect.get(ByteLengthQueuingStrategy.prototype, 'size', {}); +}, { + code: 'ERR_INVALID_THIS' +}); + +assert.throws(() => { + Reflect.get(CountQueuingStrategy.prototype, 'highWaterMark', {}); +}, { + code: 'ERR_INVALID_THIS' +}); + +assert.throws(() => { + Reflect.get(CountQueuingStrategy.prototype, 'size', {}); +}, { + code: 'ERR_INVALID_THIS' +}); + +// Custom Inspect Works + 
+{ + const strategy = new CountQueuingStrategy({ highWaterMark: 1 }); + + assert.strictEqual( + inspect(strategy, { depth: null }), + 'CountQueuingStrategy { highWaterMark: 1 }'); + + assert.strictEqual( + inspect(strategy), + 'CountQueuingStrategy { highWaterMark: 1 }'); + + assert.strictEqual( + inspect(strategy, { depth: 0 }), + 'CountQueuingStrategy [Object]'); + + assert.strictEqual( + inspect(new ByteLengthQueuingStrategy({ highWaterMark: 1 })), + 'ByteLengthQueuingStrategy { highWaterMark: 1 }'); +} diff --git a/test/parallel/test-whatwg-webstreams-transfer.js b/test/parallel/test-whatwg-webstreams-transfer.js new file mode 100644 index 00000000000000..2b7333d9c6fbf7 --- /dev/null +++ b/test/parallel/test-whatwg-webstreams-transfer.js @@ -0,0 +1,503 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); + +const { + ReadableStream, + WritableStream, + TransformStream, +} = require('stream/web'); + +const { + Worker +} = require('worker_threads'); + +const { + isReadableStream, +} = require('internal/webstreams/readablestream'); + +const { + isWritableStream, +} = require('internal/webstreams/writablestream'); + +const { + isTransformStream, +} = require('internal/webstreams/transformstream'); + +const { + makeTransferable, + kClone, + kTransfer, + kDeserialize, +} = require('internal/worker/js_transferable'); + +const assert = require('assert'); + +const theData = 'hello'; + +{ + const { port1, port2 } = new MessageChannel(); + port1.onmessageerror = common.mustNotCall(); + port2.onmessageerror = common.mustNotCall(); + + // This test takes the ReadableStream and transfers it to the + // port1 first, then again to port2, which reads the data. + // Internally, this sets up a pipelined data flow that is + // important to understand in case this test fails.. + // + // Specifically: + // + // 1. We start with ReadableStream R1, + // 2. 
Calling port2.postMessage causes a new internal WritableStream W1 + // and a new ReadableStream R2 to be created, both of which are coupled + // to each other via a pair of MessagePorts P1 and P2. + // 3. ReadableStream R2 is passed to the port1.onmessage callback as the + // data property of the MessageEvent, and R1 is configured to pipeTo W1. + // 4. Within port1.onmessage, we transfer ReadableStream R2 to port1, which + // creates a new internal WritableStream W2 and a new ReadableStream R3, + // both of which are coupled to each other via a pair of MessagePorts + // P3 and P4. + // 5. ReadableStream R3 is passed to the port2.onmessage callback as the + // data property of the MessageEvent, and R2 is configured to pipeTo W2. + // 6. Once the reader is attached to R3 in the port2.onmessage callback, + // a message is sent along the path: R3 -> P4 -> P3 -> R2 -> P2 -> P1 -> R1 + // to begin pulling the data. The data is then pushed along the pipeline + // R1 -> W1 -> P1 -> P2 -> R2 -> W2 -> P3 -> P4 -> R3 + // 7. The MessagePorts P1, P2, P3, and P4 serve as a control channel for + // passing data and control instructions, potentially across realms, + // to the other ReadableStream and WritableStream instances. + // + // If this test experiences timeouts (hangs without finishing), it's most + // likely because the control instructions are somehow broken and the + // MessagePorts are not being closed properly or it could be caused by + // failing the close R1's controller which signals the end of the data + // flow. 
+ + const readable = new ReadableStream({ + start: common.mustCall((controller) => { + controller.enqueue(theData); + controller.close(); + }), + }); + + port2.onmessage = common.mustCall(({ data }) => { + assert(isReadableStream(data)); + + const reader = data.getReader(); + reader.read().then(common.mustCall((chunk) => { + assert.deepStrictEqual(chunk, { done: false, value: theData }); + })); + + port2.close(); + }); + + port1.onmessage = common.mustCall(({ data }) => { + assert(isReadableStream(data)); + assert(!data.locked); + port1.postMessage(data, [data]); + assert(data.locked); + }); + + assert.throws(() => port2.postMessage(readable), { + code: 'ERR_MISSING_TRANSFERABLE_IN_TRANSFER_LIST', + }); + + port2.postMessage(readable, [readable]); + assert(readable.locked); +} + +{ + const { port1, port2 } = new MessageChannel(); + port1.onmessageerror = common.mustNotCall(); + port2.onmessageerror = common.mustNotCall(); + + // Like the ReadableStream test above, this sets up a pipeline + // through which the data flows... + // + // We start with WritableStream W1, which is transfered to port1. + // Doing so creates an internal ReadableStream R1 and WritableStream W2, + // which are coupled together with MessagePorts P1 and P2. + // The port1.onmessage callback receives WritableStream W2 and + // immediately transfers that to port2. Doing so creates an internal + // ReadableStream R2 and WritableStream W3, which are coupled together + // with MessagePorts P3 and P4. WritableStream W3 is handed off to + // port2.onmessage. 
+ // + // When the writer on port2.onmessage writes the chunk of data, it + // gets passed along the pipeline: + // W3 -> P4 -> P3 -> R2 -> W2 -> P2 -> P1 -> R1 -> W1 + + const writable = new WritableStream({ + write: common.mustCall((chunk) => { + assert.strictEqual(chunk, theData); + }), + }); + + port2.onmessage = common.mustCall(({ data }) => { + assert(isWritableStream(data)); + assert(!data.locked); + const writer = data.getWriter(); + writer.write(theData).then(common.mustCall()); + writer.close(); + port2.close(); + }); + + port1.onmessage = common.mustCall(({ data }) => { + assert(isWritableStream(data)); + assert(!data.locked); + port1.postMessage(data, [data]); + assert(data.locked); + }); + + assert.throws(() => port2.postMessage(writable), { + code: 'ERR_MISSING_TRANSFERABLE_IN_TRANSFER_LIST', + }); + + port2.postMessage(writable, [writable]); + assert(writable.locked); +} + +{ + const { port1, port2 } = new MessageChannel(); + port1.onmessageerror = common.mustNotCall(); + port2.onmessageerror = common.mustNotCall(); + + // The data flow here is actually quite complicated, and is a combination + // of the WritableStream and ReadableStream examples above. + // + // We start with TransformStream T1, which creates ReadableStream R1, + // and WritableStream W1. + // + // When T1 is transfered to port1.onmessage, R1 and W1 are individually + // transfered. + // + // When R1 is transfered, it creates internal WritableStream W2, and + // new ReadableStream R2, coupled together via MessagePorts P1 and P2. + // + // When W1 is transfered, it creates internal ReadableStream R3 and + // new WritableStream W3, coupled together via MessagePorts P3 and P4. + // + // A new TransformStream T2 is created that owns ReadableStream R2 and + // WritableStream W3. The port1.onmessage callback immediately transfers + // that to port2.onmessage. + // + // When T2 is transfered, R2 and W3 are individually transfered. 
+ // + // When R2 is transfered, it creates internal WritableStream W4, and + // ReadableStream R4, coupled together via MessagePorts P5 and P6. + // + // When W3 is transfered, it creates internal ReadableStream R5, and + // WritableStream W5, coupled together via MessagePorts P7 and P8. + // + // A new TransformStream T3 is created that owns ReadableStream R4 and + // WritableStream W5. + // + // port1.onmessage then writes a chunk of data. That chunk of data + // flows through the pipeline to T1: + // + // W5 -> P8 -> P7 -> R5 -> W3 -> P4 -> P3 -> R3 -> W1 -> T1 + // + // T1 performs the transformation, then pushes the chunk back out + // along the pipeline: + // + // T1 -> R1 -> W2 -> P1 -> P2 -> R2 -> W4 -> P5 -> P6 -> R4 + + const transform = new TransformStream({ + transform(chunk, controller) { + controller.enqueue(chunk.toUpperCase()); + } + }); + + port2.onmessage = common.mustCall(({ data }) => { + assert(isTransformStream(data)); + const writer = data.writable.getWriter(); + const reader = data.readable.getReader(); + Promise.all([ + writer.write(theData), + writer.close(), + reader.read().then(common.mustCall((result) => { + assert(!result.done); + assert.strictEqual(result.value, theData.toUpperCase()); + })), + reader.read().then(common.mustCall((result) => { + assert(result.done); + })), + ]).then(common.mustCall()); + port2.close(); + }); + + port1.onmessage = common.mustCall(({ data }) => { + assert(isTransformStream(data)); + assert(!data.readable.locked); + assert(!data.writable.locked); + port1.postMessage(data, [data]); + assert(data.readable.locked); + assert(data.writable.locked); + }); + + assert.throws(() => port2.postMessage(transform), { + code: 'ERR_MISSING_TRANSFERABLE_IN_TRANSFER_LIST', + }); + + port2.postMessage(transform, [transform]); + assert(transform.readable.locked); + assert(transform.writable.locked); +} + +{ + const { port1, port2 } = new MessageChannel(); + let controller; + + const readable = new ReadableStream({ + 
start(c) { controller = c; }, + + cancel: common.mustCall((error) => { + assert.strictEqual(error.code, 25); // DataCloneError + }), + }); + + port1.onmessage = ({ data }) => { + const reader = data.getReader(); + assert.rejects(reader.read(), { + code: 25, // DataCloneError + }); + port1.close(); + }; + + port2.postMessage(readable, [readable]); + + const notActuallyTransferable = makeTransferable({ + [kClone]() { + return { + data: {}, + deserializeInfo: 'nothing that will work', + }; + }, + [kDeserialize]: common.mustNotCall(), + }); + + controller.enqueue(notActuallyTransferable); +} + +{ + const { port1, port2 } = new MessageChannel(); + + const source = { + abort: common.mustCall((error) => { + process.nextTick(() => { + assert.strictEqual(error.code, 25); + assert.strictEqual(error.name, 'DataCloneError'); + }); + }) + }; + + const writable = new WritableStream(source); + + const notActuallyTransferable = makeTransferable({ + [kClone]() { + return { + data: {}, + deserializeInfo: 'nothing that will work', + }; + }, + [kDeserialize]: common.mustNotCall(), + }); + + port1.onmessage = common.mustCall(({ data }) => { + const writer = data.getWriter(); + + assert.rejects(writer.closed, { + code: 25, + name: 'DataCloneError', + }); + + writer.write(notActuallyTransferable).then(common.mustCall()); + + port1.close(); + }); + + port2.postMessage(writable, [writable]); +} + +{ + const error = new Error('boom'); + const { port1, port2 } = new MessageChannel(); + + const source = { + abort: common.mustCall((reason) => { + process.nextTick(() => { + assert.deepStrictEqual(reason, error); + + // Reason is a clone of the original error. 
+ assert.notStrictEqual(reason, error); + }); + }), + }; + + const writable = new WritableStream(source); + + port1.onmessage = common.mustCall(({ data }) => { + const writer = data.getWriter(); + + assert.rejects(writer.closed, error); + + writer.abort(error).then(common.mustCall()); + port1.close(); + }); + + port2.postMessage(writable, [writable]); +} + +{ + const { port1, port2 } = new MessageChannel(); + + const source = { + abort: common.mustCall((error) => { + process.nextTick(() => assert.strictEqual(error.code, 25)); + }) + }; + + const writable = new WritableStream(source); + + port1.onmessage = common.mustCall(({ data }) => { + const writer = data.getWriter(); + + const m = new WebAssembly.Memory({ initial: 1 }); + + assert.rejects(writer.abort(m), { + code: 25 + }); + port1.close(); + }); + + port2.postMessage(writable, [writable]); +} + +{ + // Verify that the communication works across worker threads... + + const worker = new Worker(` + const { + isReadableStream, + } = require('internal/webstreams/readablestream'); + + const { + parentPort, + } = require('worker_threads'); + + const assert = require('assert'); + + const tracker = new assert.CallTracker(); + process.on('exit', () => { + tracker.verify(); + }); + + parentPort.onmessage = tracker.calls(({ data }) => { + assert(isReadableStream(data)); + const reader = data.getReader(); + reader.read().then(tracker.calls((result) => { + assert(!result.done); + assert(result.value instanceof Uint8Array); + })); + parentPort.close(); + }); + parentPort.onmessageerror = () => assert.fail('should not be called'); + `, { eval: true }); + + worker.on('error', common.mustNotCall()); + + const readable = new ReadableStream({ + start(controller) { + controller.enqueue(new Uint8Array(10)); + controller.close(); + } + }); + + worker.postMessage(readable, [readable]); +} + +{ + const source = { + cancel: common.mustCall(), + }; + + const readable = new ReadableStream(source); + + const { port1, port2 } = new 
MessageChannel(); + + port1.onmessage = common.mustCall(({ data }) => { + data.cancel().then(common.mustCall()); + port1.close(); + }); + + port2.postMessage(readable, [readable]); +} + +{ + const source = { + cancel: common.mustCall((error) => { + process.nextTick(() => assert(error.code, 25)); + }), + }; + + const readable = new ReadableStream(source); + + const { port1, port2 } = new MessageChannel(); + + port1.onmessage = common.mustCall(({ data }) => { + const m = new WebAssembly.Memory({ initial: 1 }); + + const reader = data.getReader(); + + const cancel = reader.cancel(m); + + reader.closed.then(common.mustCall()); + + assert.rejects(cancel, { + code: 25 + }); + + port1.close(); + }); + + port2.postMessage(readable, [readable]); +} + +{ + const source = { + abort: common.mustCall((error) => { + process.nextTick(() => { + assert.strictEqual(error.code, 25); + }); + }), + }; + + const writable = new WritableStream(source); + + const { port1, port2 } = new MessageChannel(); + + port1.onmessage = common.mustCall(({ data }) => { + const m = new WebAssembly.Memory({ initial: 1 }); + const writer = data.getWriter(); + const write = writer.write(m); + assert.rejects(write, { code: 25 }); + port1.close(); + }); + + port2.postMessage(writable, [writable]); +} + +{ + const readable = new ReadableStream(); + readable.getReader(); + assert.throws(() => readable[kTransfer](), { + code: 25 + }); + + const writable = new WritableStream(); + writable.getWriter(); + assert.throws(() => writable[kTransfer](), { + code: 25 + }); +} diff --git a/test/parallel/test-whatwg-writablestream.js b/test/parallel/test-whatwg-writablestream.js new file mode 100644 index 00000000000000..91e3c098462949 --- /dev/null +++ b/test/parallel/test-whatwg-writablestream.js @@ -0,0 +1,260 @@ +// Flags: --expose-internals --no-warnings +'use strict'; + +const common = require('../common'); +const assert = require('assert'); + +const { + WritableStream, + WritableStreamDefaultController, + 
WritableStreamDefaultWriter, + CountQueuingStrategy, +} = require('stream/web'); + +const { + kState, +} = require('internal/webstreams/util'); + +const { + isPromise, +} = require('util/types'); + +const { + kTransfer, +} = require('internal/worker/js_transferable'); + +const { + inspect, +} = require('util'); + +class Sink { + constructor() { + this.chunks = []; + } + + start() { + this.started = true; + } + + write(chunk) { + this.chunks.push(chunk); + } + + close() { + this.closed = true; + } + + abort() { + this.aborted = true; + } +} + +{ + const stream = new WritableStream(); + + assert(stream[kState].controller instanceof WritableStreamDefaultController); + assert(!stream.locked); + + assert.strictEqual(typeof stream.abort, 'function'); + assert.strictEqual(typeof stream.close, 'function'); + assert.strictEqual(typeof stream.getWriter, 'function'); +} + +[1, false, ''].forEach((type) => { + assert.throws(() => new WritableStream({ type }), { + code: 'ERR_INVALID_ARG_VALUE', + }); +}); + +['a', {}].forEach((highWaterMark) => { + assert.throws(() => new WritableStream({}, { highWaterMark }), { + code: 'ERR_INVALID_ARG_VALUE', + }); +}); + +['a', false, {}].forEach((size) => { + assert.throws(() => new WritableStream({}, { size }), { + code: 'ERR_INVALID_ARG_TYPE', + }); +}); + +{ + new WritableStream({}, 1); + new WritableStream({}, 'a'); + new WritableStream({}, null); +} + +{ + const sink = new Sink(); + const stream = new WritableStream( + sink, + new CountQueuingStrategy({ highWaterMark: 1 })); + + assert(!stream.locked); + const writer = stream.getWriter(); + assert(stream.locked); + assert(writer instanceof WritableStreamDefaultWriter); + + assert(isPromise(writer.closed)); + assert(isPromise(writer.ready)); + assert(typeof writer.desiredSize, 'number'); + assert(typeof writer.abort, 'function'); + assert(typeof writer.close, 'function'); + assert(typeof writer.releaseLock, 'function'); + assert(typeof writer.write, 'function'); + + 
writer.releaseLock(); + assert(!stream.locked); + + const writer2 = stream.getWriter(); + + assert(sink.started); + + writer2.closed.then(common.mustCall()); + writer2.ready.then(common.mustCall()); + + writer2.close().then(common.mustCall(() => { + assert.strict(sink.closed); + })); +} + +{ + const sink = new Sink(); + + const stream = new WritableStream( + sink, + new CountQueuingStrategy({ highWaterMark: 1 })); + + const error = new Error('boom'); + + const writer = stream.getWriter(); + + assert.rejects(writer.closed, error); + + writer.abort(error).then(common.mustCall(() => { + assert.strictEqual(stream[kState].state, 'errored'); + assert(sink.aborted); + })); +} + +{ + const sink = new Sink(); + + const stream = new WritableStream( + sink, { highWaterMark: 1 } + ); + + async function write(stream) { + const writer = stream.getWriter(); + const p = writer.write('hello'); + assert.strictEqual(writer.desiredSize, 0); + await p; + assert.strictEqual(writer.desiredSize, 1); + } + + write(stream).then(common.mustCall(() => { + assert.deepStrictEqual(['hello'], sink.chunks); + })); +} + +{ + assert.throws(() => Reflect.get(WritableStream.prototype, 'locked', {}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => WritableStream.prototype.abort({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(() => WritableStream.prototype.close({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => WritableStream.prototype.getWriter.call(), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => WritableStream.prototype[kTransfer].call(), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects( + Reflect.get(WritableStreamDefaultWriter.prototype, 'closed'), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects( + Reflect.get(WritableStreamDefaultWriter.prototype, 'ready'), { + code: 'ERR_INVALID_THIS', + }); + assert.throws( + () => Reflect.get(WritableStreamDefaultWriter.prototype, 'desiredSize'), { + code: 'ERR_INVALID_THIS', + }); + 
assert.rejects(WritableStreamDefaultWriter.prototype.abort({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(WritableStreamDefaultWriter.prototype.close({}), { + code: 'ERR_INVALID_THIS', + }); + assert.rejects(WritableStreamDefaultWriter.prototype.write({}), { + code: 'ERR_INVALID_THIS', + }); + assert.throws(() => WritableStreamDefaultWriter.prototype.releaseLock({}), { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + Reflect.get(WritableStreamDefaultController.prototype, 'abortReason', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + Reflect.get(WritableStreamDefaultController.prototype, 'signal', {}); + }, { + code: 'ERR_INVALID_THIS', + }); + + assert.throws(() => { + WritableStreamDefaultController.prototype.error({}); + }, { + code: 'ERR_INVALID_THIS', + }); +} + +{ + let controller; + const writable = new WritableStream({ + start(c) { controller = c; } + }); + assert.strictEqual( + inspect(writable), + 'WritableStream { locked: false, state: \'writable\' }'); + assert.strictEqual( + inspect(writable, { depth: null }), + 'WritableStream { locked: false, state: \'writable\' }'); + assert.strictEqual( + inspect(writable, { depth: 0 }), + 'WritableStream [Object]'); + + const writer = writable.getWriter(); + assert.match( + inspect(writer), + /WritableStreamDefaultWriter/); + assert.match( + inspect(writer, { depth: null }), + /WritableStreamDefaultWriter/); + assert.match( + inspect(writer, { depth: 0 }), + /WritableStreamDefaultWriter \[/); + + assert.match( + inspect(controller), + /WritableStreamDefaultController/); + assert.match( + inspect(controller, { depth: null }), + /WritableStreamDefaultController/); + assert.match( + inspect(controller, { depth: 0 }), + /WritableStreamDefaultController \[/); + + writer.abort(new Error('boom')); + + assert.strictEqual(writer.desiredSize, null); + setImmediate(() => assert.strictEqual(writer.desiredSize, null)); +} diff --git a/test/wpt/status/streams.json 
b/test/wpt/status/streams.json index 0967ef424bce67..c1b80d69dd8cd3 100644 --- a/test/wpt/status/streams.json +++ b/test/wpt/status/streams.json @@ -1 +1,11 @@ -{} +{ + "queuing-strategies-size-function-per-global.window.js": { + "skip": "Browser-specific test" + }, + "transferable/deserialize-error.window.js": { + "skip": "Browser-specific test" + }, + "readable-byte-streams/bad-buffers-and-views.any.js": { + "fail": "TODO: implement detached ArrayBuffer support" + } +} diff --git a/test/wpt/test-streams.js b/test/wpt/test-streams.js index 6a64f241c10e2d..987676d8c49125 100644 --- a/test/wpt/test-streams.js +++ b/test/wpt/test-streams.js @@ -10,7 +10,7 @@ runner.setFlags(['--expose-internals']); // Set a script that will be executed in the worker before running the tests. runner.setInitScript(` - const { + let { ReadableStream, ReadableStreamDefaultReader, ReadableStreamBYOBReader, @@ -29,19 +29,111 @@ runner.setInitScript(` const { internalBinding } = require('internal/test/binding'); const { DOMException } = internalBinding('messaging'); global.DOMException = DOMException; - global.ReadableStream = ReadableStream; - global.ReadableStreamDefaultReader = ReadableStreamDefaultReader; - global.ReadableStreamBYOBReader = ReadableStreamBYOBReader; - global.ReadableStreamBYOBRequest = ReadableStreamBYOBRequest; - global.ReadableByteStreamController = ReadableByteStreamController; - global.ReadableStreamDefaultController = ReadableStreamDefaultController; - global.TransformStream = TransformStream; - global.TransformStreamDefaultController = TransformStreamDefaultController; - global.WritableStream = WritableStream; - global.WritableStreamDefaultWriter = WritableStreamDefaultWriter; - global.WritableStreamDefaultController = WritableStreamDefaultController; - global.ByteLengthQueuingStrategy = ByteLengthQueuingStrategy; - global.CountQueuingStrategy = CountQueuingStrategy; + + Object.defineProperties(global, { + ReadableStream: { + value: ReadableStream, + configurable: 
true, + writable: true, + enumerable: false, + }, + ReadableStreamDefaultReader: { + value: ReadableStreamDefaultReader, + configurable: true, + writable: true, + enumerable: false, + }, + ReadableStreamBYOBReader: { + value: ReadableStreamBYOBReader, + configurable: true, + writable: true, + enumerable: false, + }, + ReadableStreamBYOBRequest: { + value: ReadableStreamBYOBRequest, + configurable: true, + writable: true, + enumerable: false, + }, + ReadableByteStreamController: { + value: ReadableByteStreamController, + configurable: true, + writable: true, + enumerable: false, + }, + ReadableStreamDefaultController: { + value: ReadableStreamDefaultController, + configurable: true, + writable: true, + enumerable: false, + }, + TransformStream: { + value: TransformStream, + configurable: true, + writable: true, + enumerable: false, + }, + TransformStreamDefaultController: { + value: TransformStreamDefaultController, + configurable: true, + writable: true, + enumerable: false, + }, + WritableStream: { + value: WritableStream, + configurable: true, + writable: true, + enumerable: false, + }, + WritableStreamDefaultWriter: { + value: WritableStreamDefaultWriter, + configurable: true, + writable: true, + enumerable: false, + }, + WritableStreamDefaultController: { + value: WritableStreamDefaultController, + configurable: true, + writable: true, + enumerable: false, + }, + ByteLengthQueuingStrategy: { + value: ByteLengthQueuingStrategy, + configurable: true, + writable: true, + enumerable: false, + }, + CountQueuingStrategy: { + value: CountQueuingStrategy, + configurable: true, + writable: true, + enumerable: false, + }, + }); + + // Simulate global postMessage for enqueue-with-detached-buffer.window.js + function postMessage(value, origin, transferList) { + const mc = new MessageChannel(); + mc.port1.postMessage(value, transferList); + mc.port2.close(); + } + + // TODO(@jasnell): This is a bit of a hack to get the idl harness test + // working. 
Later we should investigate a better approach. + // See: https://github.com/nodejs/node/pull/39062#discussion_r659383373 + Object.defineProperties(global, { + DedicatedWorkerGlobalScope: { + get() { + // Pretend that we're a DedicatedWorker, but *only* for the + // IDL harness. For everything else, keep the JavaScript shell + // environment. + if (new Error().stack.includes('idlharness.js')) + return global.constructor; + else + return function() {}; + } + } + }); `); runner.runJsTests(); diff --git a/tools/doc/type-parser.mjs b/tools/doc/type-parser.mjs index c2586a43254ecb..e3b8ad0ffac4bf 100644 --- a/tools/doc/type-parser.mjs +++ b/tools/doc/type-parser.mjs @@ -226,6 +226,33 @@ const customTypesMap = { 'X509Certificate': 'crypto.html#crypto_class_x509certificate', 'zlib options': 'zlib.html#zlib_class_options', + + 'ReadableStream': + 'webstreams.md#webstreamsapi_class_readablestream', + 'ReadableStreamDefaultReader': + 'webstreams.md#webstreamsapi_class_readablestreamdefaultreader', + 'ReadableStreamBYOBReader': + 'webstreams.md#webstreamsapi_class_readablestreambyobreader', + 'ReadableStreamDefaultController': + 'webstreams.md#webstreamsapi_class_readablestreamdefaultcontroller', + 'ReadableByteStreamController': + 'webstreams.md#webstreamsapi_class_readablebytestreamcontroller', + 'ReadableStreamBYOBRequest': + 'webstreams.md#webstreamsapi_class_readablestreambyobrequest', + 'WritableStream': + 'webstreams.md#webstreamsapi_class_writablestream', + 'WritableStreamDefaultWriter': + 'webstreams.md#webstreamsapi_class_writablestreamdefaultwriter', + 'WritableStreamDefaultController': + 'webstreams.md#webstreamsapi_class_writablestreamdefaultcontroller', + 'TransformStream': + 'webstreams.md#webstreamsapi_class_transformstream', + 'TransformStreamDefaultController': + 'webstreams.md#webstreamsapi_class_transformstreamdefaultcontroller', + 'ByteLengthQueuingStrategy': + 'webstreams.md#webstreamsapi_class_bytelengthqueuingstrategy', + 'CountQueuingStrategy': + 
'webstreams.md#webstreamsapi_class_countqueuingstrategy', }; const arrayPart = /(?:\[])+$/; diff --git a/typings/primordials.d.ts b/typings/primordials.d.ts index 0436e92b1d9b53..beed1d7b83c4c9 100644 --- a/typings/primordials.d.ts +++ b/typings/primordials.d.ts @@ -1,3 +1,5 @@ +import { AsyncIterator } from "internal/webstreams/util"; + type UncurryThis unknown> = (self: ThisParameterType, ...args: Parameters) => ReturnType; type UncurryThisStaticApply unknown> = @@ -9,15 +11,15 @@ type StaticApply unknown> = * Primordials are a way to safely use globals without fear of global mutation * Generally, this means removing `this` parameter usage and instead using * a regular parameter: - * + * * @example - * + * * ```js * 'thing'.startsWith('hello'); * ``` - * + * * becomes - * + * * ```js * primordials.StringPrototypeStartsWith('thing', 'hello') * ``` @@ -142,6 +144,7 @@ declare namespace primordials { export const ArrayBufferPrototype: typeof ArrayBuffer.prototype export const ArrayBufferIsView: typeof ArrayBuffer.isView export const ArrayBufferPrototypeSlice: UncurryThis + export const AsyncIteratorPrototype: UncurryThis export const BigInt: typeof globalThis.BigInt; export const BigIntLength: typeof BigInt.length export const BigIntName: typeof BigInt.name @@ -522,5 +525,5 @@ declare namespace primordials { export const PromiseAny: typeof Promise.any export const PromisePrototypeThen: UncurryThis export const PromisePrototypeCatch: UncurryThis - export const PromisePrototypeFinally: UncurryThis + export const PromisePrototypeFinally: UncurryThis } From af1e1dba36f162cbe21590509899ad65270435c2 Mon Sep 17 00:00:00 2001 From: legendecas Date: Thu, 24 Jun 2021 00:30:09 +0800 Subject: [PATCH 037/133] doc: correct JavaScript primitive value names in n-api.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/39129 Reviewed-By: Luigi Pinca Reviewed-By: Tobias Nießen Reviewed-By: Michael 
Dawson Reviewed-By: James M Snell --- doc/api/n-api.md | 108 +++++++++++++++++++++++------------------------ 1 file changed, 54 insertions(+), 54 deletions(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index 6115bc14674429..ec15f7cf7a41b2 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -1025,7 +1025,7 @@ clear the exception. On success, result will contain the handle to the last JavaScript `Object` thrown. If it is determined, after retrieving the exception, the exception cannot be handled after all it can be re-thrown it with [`napi_throw`][] where error is the -JavaScript `Error` object to be thrown. +JavaScript value to be thrown. The following utility functions are also available in case native code needs to throw an exception or determine if a `napi_value` is an instance @@ -1178,7 +1178,7 @@ NAPI_EXTERN napi_status napi_create_error(napi_env env, * `[in] env`: The environment that the API is invoked under. * `[in] code`: Optional `napi_value` with the string for the error code to be associated with the error. -* `[in] msg`: `napi_value` that references a JavaScript `String` to be used as +* `[in] msg`: `napi_value` that references a JavaScript `string` to be used as the message for the `Error`. * `[out] result`: `napi_value` representing the error created. @@ -1202,7 +1202,7 @@ NAPI_EXTERN napi_status napi_create_type_error(napi_env env, * `[in] env`: The environment that the API is invoked under. * `[in] code`: Optional `napi_value` with the string for the error code to be associated with the error. -* `[in] msg`: `napi_value` that references a JavaScript `String` to be used as +* `[in] msg`: `napi_value` that references a JavaScript `string` to be used as the message for the `Error`. * `[out] result`: `napi_value` representing the error created. @@ -1226,7 +1226,7 @@ NAPI_EXTERN napi_status napi_create_range_error(napi_env env, * `[in] env`: The environment that the API is invoked under. 
* `[in] code`: Optional `napi_value` with the string for the error code to be associated with the error. -* `[in] msg`: `napi_value` that references a JavaScript `String` to be used as +* `[in] msg`: `napi_value` that references a JavaScript `string` to be used as the message for the `Error`. * `[out] result`: `napi_value` representing the error created. @@ -2357,14 +2357,14 @@ napi_status napi_create_symbol(napi_env env, * `[in] env`: The environment that the API is invoked under. * `[in] description`: Optional `napi_value` which refers to a JavaScript - `String` to be set as the description for the symbol. -* `[out] result`: A `napi_value` representing a JavaScript `Symbol`. + `string` to be set as the description for the symbol. +* `[out] result`: A `napi_value` representing a JavaScript `symbol`. Returns `napi_ok` if the API succeeded. -This API creates a JavaScript `Symbol` object from a UTF8-encoded C string. +This API creates a JavaScript `symbol` value from a UTF8-encoded C string. -The JavaScript `Symbol` type is described in [Section 19.4][] +The JavaScript `symbol` type is described in [Section 19.4][] of the ECMAScript Language Specification. #### napi_create_typedarray @@ -2451,14 +2451,14 @@ napi_status napi_create_int32(napi_env env, int32_t value, napi_value* result) * `[in] env`: The environment that the API is invoked under. * `[in] value`: Integer value to be represented in JavaScript. -* `[out] result`: A `napi_value` representing a JavaScript `Number`. +* `[out] result`: A `napi_value` representing a JavaScript `number`. Returns `napi_ok` if the API succeeded. This API is used to convert from the C `int32_t` type to the JavaScript -`Number` type. +`number` type. -The JavaScript `Number` type is described in +The JavaScript `number` type is described in [Section 6.1.6][] of the ECMAScript Language Specification. 
#### napi_create_uint32 @@ -2473,14 +2473,14 @@ napi_status napi_create_uint32(napi_env env, uint32_t value, napi_value* result) * `[in] env`: The environment that the API is invoked under. * `[in] value`: Unsigned integer value to be represented in JavaScript. -* `[out] result`: A `napi_value` representing a JavaScript `Number`. +* `[out] result`: A `napi_value` representing a JavaScript `number`. Returns `napi_ok` if the API succeeded. This API is used to convert from the C `uint32_t` type to the JavaScript -`Number` type. +`number` type. -The JavaScript `Number` type is described in +The JavaScript `number` type is described in [Section 6.1.6][] of the ECMAScript Language Specification. #### napi_create_int64 @@ -2495,14 +2495,14 @@ napi_status napi_create_int64(napi_env env, int64_t value, napi_value* result) * `[in] env`: The environment that the API is invoked under. * `[in] value`: Integer value to be represented in JavaScript. -* `[out] result`: A `napi_value` representing a JavaScript `Number`. +* `[out] result`: A `napi_value` representing a JavaScript `number`. Returns `napi_ok` if the API succeeded. This API is used to convert from the C `int64_t` type to the JavaScript -`Number` type. +`number` type. -The JavaScript `Number` type is described in [Section 6.1.6][] +The JavaScript `number` type is described in [Section 6.1.6][] of the ECMAScript Language Specification. Note the complete range of `int64_t` cannot be represented with full precision in JavaScript. Integer values outside the range of [`Number.MIN_SAFE_INTEGER`][] `-(2**53 - 1)` - @@ -2520,14 +2520,14 @@ napi_status napi_create_double(napi_env env, double value, napi_value* result) * `[in] env`: The environment that the API is invoked under. * `[in] value`: Double-precision value to be represented in JavaScript. -* `[out] result`: A `napi_value` representing a JavaScript `Number`. +* `[out] result`: A `napi_value` representing a JavaScript `number`. Returns `napi_ok` if the API succeeded. 
This API is used to convert from the C `double` type to the JavaScript -`Number` type. +`number` type. -The JavaScript `Number` type is described in +The JavaScript `number` type is described in [Section 6.1.6][] of the ECMAScript Language Specification. #### napi_create_bigint_int64 @@ -2616,14 +2616,14 @@ napi_status napi_create_string_latin1(napi_env env, * `[in] str`: Character buffer representing an ISO-8859-1-encoded string. * `[in] length`: The length of the string in bytes, or `NAPI_AUTO_LENGTH` if it is null-terminated. -* `[out] result`: A `napi_value` representing a JavaScript `String`. +* `[out] result`: A `napi_value` representing a JavaScript `string`. Returns `napi_ok` if the API succeeded. -This API creates a JavaScript `String` object from an ISO-8859-1-encoded C +This API creates a JavaScript `string` value from an ISO-8859-1-encoded C string. The native string is copied. -The JavaScript `String` type is described in +The JavaScript `string` type is described in [Section 6.1.4][] of the ECMAScript Language Specification. #### napi_create_string_utf16 @@ -2643,14 +2643,14 @@ napi_status napi_create_string_utf16(napi_env env, * `[in] str`: Character buffer representing a UTF16-LE-encoded string. * `[in] length`: The length of the string in two-byte code units, or `NAPI_AUTO_LENGTH` if it is null-terminated. -* `[out] result`: A `napi_value` representing a JavaScript `String`. +* `[out] result`: A `napi_value` representing a JavaScript `string`. Returns `napi_ok` if the API succeeded. -This API creates a JavaScript `String` object from a UTF16-LE-encoded C string. +This API creates a JavaScript `string` value from a UTF16-LE-encoded C string. The native string is copied. -The JavaScript `String` type is described in +The JavaScript `string` type is described in [Section 6.1.4][] of the ECMAScript Language Specification. 
#### napi_create_string_utf8 @@ -2670,14 +2670,14 @@ napi_status napi_create_string_utf8(napi_env env, * `[in] str`: Character buffer representing a UTF8-encoded string. * `[in] length`: The length of the string in bytes, or `NAPI_AUTO_LENGTH` if it is null-terminated. -* `[out] result`: A `napi_value` representing a JavaScript `String`. +* `[out] result`: A `napi_value` representing a JavaScript `string`. Returns `napi_ok` if the API succeeded. -This API creates a JavaScript `String` object from a UTF8-encoded C string. +This API creates a JavaScript `string` value from a UTF8-encoded C string. The native string is copied. -The JavaScript `String` type is described in +The JavaScript `string` type is described in [Section 6.1.4][] of the ECMAScript Language Specification. ### Functions to convert from Node-API to C types @@ -2841,7 +2841,7 @@ napi_status napi_get_dataview_info(napi_env env, * `[in] env`: The environment that the API is invoked under. * `[in] dataview`: `napi_value` representing the `DataView` whose properties to query. -* `[out] byte_length`: `Number` of bytes in the `DataView`. +* `[out] byte_length`: Number of bytes in the `DataView`. * `[out] data`: The data buffer underlying the `DataView`. If byte_length is `0`, this may be `NULL` or any other pointer value. * `[out] arraybuffer`: `ArrayBuffer` underlying the `DataView`. @@ -2914,15 +2914,15 @@ napi_status napi_get_value_double(napi_env env, ``` * `[in] env`: The environment that the API is invoked under. -* `[in] value`: `napi_value` representing JavaScript `Number`. +* `[in] value`: `napi_value` representing JavaScript `number`. * `[out] result`: C double primitive equivalent of the given JavaScript - `Number`. + `number`. Returns `napi_ok` if the API succeeded. If a non-number `napi_value` is passed in it returns `napi_number_expected`. This API returns the C double primitive equivalent of the given JavaScript -`Number`. +`number`. 
#### napi_get_value_bigint_int64 - -> Stability: 1 - Experimental - -Measure the memory known to V8 and used by all contexts known to the -current V8 isolate, or the main context. - -* `options` {Object} Optional. - * `mode` {string} Either `'summary'` or `'detailed'`. In summary mode, - only the memory measured for the main context will be returned. In - detailed mode, the measure measured for all contexts known to the - current V8 isolate will be returned. - **Default:** `'summary'` - * `execution` {string} Either `'default'` or `'eager'`. With default - execution, the promise will not resolve until after the next scheduled - garbage collection starts, which may take a while (or never if the program - exits before the next GC). With eager execution, the GC will be started - right away to measure the memory. - **Default:** `'default'` -* Returns: {Promise} If the memory is successfully measured the promise will - resolve with an object containing information about the memory usage. - -The format of the object that the returned Promise may resolve with is -specific to the V8 engine and may change from one version of V8 to the next. - -The returned result is different from the statistics returned by -`v8.getHeapSpaceStatistics()` in that `vm.measureMemory()` measure the -memory reachable by each V8 specific contexts in the current instance of -the V8 engine, while the result of `v8.getHeapSpaceStatistics()` measure -the memory occupied by each heap space in the current V8 instance. - -```js -const vm = require('vm'); -// Measure the memory used by the main context. 
-vm.measureMemory({ mode: 'summary' }) - // This is the same as vm.measureMemory() - .then((result) => { - // The current format is: - // { - // total: { - // jsMemoryEstimate: 2418479, jsMemoryRange: [ 2418479, 2745799 ] - // } - // } - console.log(result); - }); - -const context = vm.createContext({ a: 1 }); -vm.measureMemory({ mode: 'detailed', execution: 'eager' }) - .then((result) => { - // Reference the context here so that it won't be GC'ed - // until the measurement is complete. - console.log(context.a); - // { - // total: { - // jsMemoryEstimate: 2574732, - // jsMemoryRange: [ 2574732, 2904372 ] - // }, - // current: { - // jsMemoryEstimate: 2438996, - // jsMemoryRange: [ 2438996, 2768636 ] - // }, - // other: [ - // { - // jsMemoryEstimate: 135736, - // jsMemoryRange: [ 135736, 465376 ] - // } - // ] - // } - console.log(result); - }); -``` - ## Class: `vm.Module` + +> Stability: 1 - Experimental + +Measure the memory known to V8 and used by all contexts known to the +current V8 isolate, or the main context. + +* `options` {Object} Optional. + * `mode` {string} Either `'summary'` or `'detailed'`. In summary mode, + only the memory measured for the main context will be returned. In + detailed mode, the measure measured for all contexts known to the + current V8 isolate will be returned. + **Default:** `'summary'` + * `execution` {string} Either `'default'` or `'eager'`. With default + execution, the promise will not resolve until after the next scheduled + garbage collection starts, which may take a while (or never if the program + exits before the next GC). With eager execution, the GC will be started + right away to measure the memory. + **Default:** `'default'` +* Returns: {Promise} If the memory is successfully measured the promise will + resolve with an object containing information about the memory usage. 
+ +The format of the object that the returned Promise may resolve with is +specific to the V8 engine and may change from one version of V8 to the next. + +The returned result is different from the statistics returned by +`v8.getHeapSpaceStatistics()` in that `vm.measureMemory()` measure the +memory reachable by each V8 specific contexts in the current instance of +the V8 engine, while the result of `v8.getHeapSpaceStatistics()` measure +the memory occupied by each heap space in the current V8 instance. + +```js +const vm = require('vm'); +// Measure the memory used by the main context. +vm.measureMemory({ mode: 'summary' }) + // This is the same as vm.measureMemory() + .then((result) => { + // The current format is: + // { + // total: { + // jsMemoryEstimate: 2418479, jsMemoryRange: [ 2418479, 2745799 ] + // } + // } + console.log(result); + }); + +const context = vm.createContext({ a: 1 }); +vm.measureMemory({ mode: 'detailed', execution: 'eager' }) + .then((result) => { + // Reference the context here so that it won't be GC'ed + // until the measurement is complete. + console.log(context.a); + // { + // total: { + // jsMemoryEstimate: 2574732, + // jsMemoryRange: [ 2574732, 2904372 ] + // }, + // current: { + // jsMemoryEstimate: 2438996, + // jsMemoryRange: [ 2438996, 2768636 ] + // }, + // other: [ + // { + // jsMemoryEstimate: 135736, + // jsMemoryRange: [ 135736, 465376 ] + // } + // ] + // } + console.log(result); + }); +``` + ## `vm.runInContext(code, contextifiedObject[, options])` + +The `v8.stopCoverage()` method allows the user to stop the coverage collection +started by [`NODE_V8_COVERAGE`][], so that V8 can release the execution count +records and optimize code. This can be used in conjunction with +[`v8.takeCoverage()`][] if the user wants to collect the coverage on demand. 
+ ## `v8.takeCoverage()` - -The `v8.stopCoverage()` method allows the user to stop the coverage collection -started by [`NODE_V8_COVERAGE`][], so that V8 can release the execution count -records and optimize code. This can be used in conjunction with -[`v8.takeCoverage()`][] if the user wants to collect the coverage on demand. - ## `v8.writeHeapSnapshot([filename])` - -* **Version**: -* **Platform**: -* **Subsystem**: - -### What steps will reproduce the bug? - - - -### How often does it reproduce? Is there a required condition? - -### What is the expected behavior? - - - -### What do you see instead? - - - -### Additional information - - diff --git a/.github/ISSUE_TEMPLATE/1-bug-report.yml b/.github/ISSUE_TEMPLATE/1-bug-report.yml new file mode 100644 index 00000000000000..965a2eca45d978 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/1-bug-report.yml @@ -0,0 +1,45 @@ +name: "\U0001F41B Bug report" +description: Create a report to help us improve +body: + - type: markdown + attributes: + value: | + Thank you for reporting an issue. + + This issue tracker is for bugs and issues found within Node.js core. + If you require more general support please file an issue on our help repo. https://github.com/nodejs/help + + Please fill in as much of the form below as you're able. + - type: input + attributes: + label: Version + description: Output of `node -v` + - type: input + attributes: + label: Platform + description: | + UNIX: output of `uname -a` + Windows: output of `"$([Environment]::OSVersion | ForEach-Object VersionString) $(if ([Environment]::Is64BitOperatingSystem) { "x64" } else { "x86" })"` in PowerShell console + - type: input + attributes: + label: Subsystem + description: If known, please specify affected core module name + - type: textarea + attributes: + label: What steps will reproduce the bug? + description: Enter details about your bug, preferably a simple code snippet that can be run using `node` directly without installing third-party dependencies. 
+ - type: textarea + attributes: + label: How often does it reproduce? Is there a required condition? + - type: textarea + attributes: + label: What is the expected behavior? + description: If possible please provide textual output instead of screenshots. + - type: textarea + attributes: + label: What do you see instead? + description: If possible please provide textual output instead of screenshots. + - type: textarea + attributes: + label: Additional information + description: Tell us anything else you think we should know. From 2bb3713b748181144ef269a82024f43f6a22bf2c Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sat, 3 Jul 2021 08:13:21 -0700 Subject: [PATCH 069/133] doc: update AUTHORS file PR-URL: https://github.com/nodejs/node/pull/39250 Reviewed-By: Darshan Sen Reviewed-By: Zijian Liu --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 8dd339b08944f8..59b9fab48ac60c 100644 --- a/AUTHORS +++ b/AUTHORS @@ -3318,5 +3318,6 @@ Mao Wtm Houssem Chebab Davidson Francis Rohan Sharma +AkshayK # Generated by tools/update-authors.js From cb8c6ffbcef2dc7aa5fbb9129c905751e27fbef5 Mon Sep 17 00:00:00 2001 From: cjihrig Date: Fri, 2 Jul 2021 20:09:28 -0400 Subject: [PATCH 070/133] tools: update ESLint to 7.30.0 Update ESLint to 7.30.0 PR-URL: https://github.com/nodejs/node/pull/39242 Reviewed-By: Rich Trott Reviewed-By: Zijian Liu Reviewed-By: Luigi Pinca --- .../node_modules/@types/unist/LICENSE | 42 +- .../node_modules/@types/unist/README.md | 112 ++++- .../node_modules/@types/unist/package.json | 14 +- tools/node_modules/eslint/README.md | 5 + .../eslint/lib/config/default-config.js | 52 ++ .../eslint/lib/config/flat-config-array.js | 125 +++++ .../eslint/lib/config/flat-config-schema.js | 452 +++++++++++++++++ .../eslint/lib/config/rule-validator.js | 169 +++++++ .../node_modules/eslint/lib/linter/linter.js | 28 +- .../eslint/lib/rule-tester/rule-tester.js | 6 + .../eslint/lib/rules/dot-notation.js | 6 +- 
.../eslint/lib/rules/prefer-arrow-callback.js | 8 +- .../eslint/lib/rules/use-isnan.js | 5 +- .../eslint/lib/source-code/source-code.js | 4 +- .../@humanwhocodes/config-array/LICENSE | 201 ++++++++ .../@humanwhocodes/config-array/README.md | 258 ++++++++++ .../@humanwhocodes/config-array/api.js | 457 ++++++++++++++++++ .../@humanwhocodes/config-array/package.json | 61 +++ .../@humanwhocodes/object-schema/LICENSE | 29 ++ .../@humanwhocodes/object-schema/README.md | 234 +++++++++ .../@humanwhocodes/object-schema/package.json | 33 ++ .../@humanwhocodes/object-schema/src/index.js | 7 + .../object-schema/src/merge-strategy.js | 53 ++ .../object-schema/src/object-schema.js | 239 +++++++++ .../object-schema/src/validation-strategy.js | 102 ++++ .../eslint/node_modules/flatted/README.md | 32 +- .../eslint/node_modules/flatted/cjs/index.js | 5 + .../eslint/node_modules/flatted/es.js | 2 +- .../eslint/node_modules/flatted/esm/index.js | 3 + .../eslint/node_modules/flatted/index.js | 8 + .../eslint/node_modules/flatted/min.js | 2 +- .../eslint/node_modules/flatted/package.json | 14 +- .../node_modules/flatted/php/flatted.php | 36 +- tools/node_modules/eslint/package.json | 5 +- 34 files changed, 2715 insertions(+), 94 deletions(-) mode change 100644 => 100755 tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/LICENSE mode change 100644 => 100755 tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/README.md mode change 100644 => 100755 tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/package.json create mode 100644 tools/node_modules/eslint/lib/config/default-config.js create mode 100644 tools/node_modules/eslint/lib/config/flat-config-array.js create mode 100644 tools/node_modules/eslint/lib/config/flat-config-schema.js create mode 100644 tools/node_modules/eslint/lib/config/rule-validator.js create mode 100644 tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/LICENSE create mode 100644 
tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/README.md create mode 100644 tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/api.js create mode 100644 tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/package.json create mode 100644 tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/LICENSE create mode 100644 tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/README.md create mode 100644 tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/package.json create mode 100644 tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/index.js create mode 100644 tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/merge-strategy.js create mode 100644 tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/object-schema.js create mode 100644 tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/validation-strategy.js diff --git a/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/LICENSE b/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/LICENSE old mode 100644 new mode 100755 index 4b1ad51b2f0efc..9e841e7a26e4eb --- a/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/LICENSE +++ b/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/LICENSE @@ -1,21 +1,21 @@ - MIT License - - Copyright (c) Microsoft Corporation. All rights reserved. 
- - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. - - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE + MIT License + + Copyright (c) Microsoft Corporation. + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE diff --git a/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/README.md b/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/README.md old mode 100644 new mode 100755 index a15402a4524206..78a310d17c9fab --- a/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/README.md +++ b/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/README.md @@ -2,15 +2,117 @@ > `npm install --save @types/unist` # Summary -This package contains type definitions for non-npm package Unist ( https://github.com/syntax-tree/unist ). +This package contains type definitions for Unist (https://github.com/syntax-tree/unist). # Details -Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/unist +Files were exported from https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/unist. +## [index.d.ts](https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/unist/index.d.ts) +````ts +// Type definitions for non-npm package Unist 2.0 +// Project: https://github.com/syntax-tree/unist +// Definitions by: bizen241 +// Jun Lu +// Hernan Rajchert +// Titus Wormer +// Junyoung Choi +// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped +// TypeScript Version: 3.0 + +/** + * Syntactic units in unist syntax trees are called nodes. + */ +export interface Node { + /** + * The variant of a node. + */ + type: string; + + /** + * Information from the ecosystem. + */ + data?: Data | undefined; + + /** + * Location of a node in a source document. + * Must not be present if a node is generated. 
+ */ + position?: Position | undefined; + + [key: string]: unknown; +} + +/** + * Information associated by the ecosystem with the node. + * Space is guaranteed to never be specified by unist or specifications + * implementing unist. + */ +export interface Data { + [key: string]: unknown; +} + +/** + * Location of a node in a source file. + */ +export interface Position { + /** + * Place of the first character of the parsed source region. + */ + start: Point; + + /** + * Place of the first character after the parsed source region. + */ + end: Point; + + /** + * Start column at each index (plus start line) in the source region, + * for elements that span multiple lines. + */ + indent?: number[] | undefined; +} + +/** + * One place in a source file. + */ +export interface Point { + /** + * Line in a source file (1-indexed integer). + */ + line: number; + + /** + * Column in a source file (1-indexed integer). + */ + column: number; + /** + * Character in a source file (0-indexed integer). + */ + offset?: number | undefined; +} + +/** + * Nodes containing other nodes. + */ +export interface Parent extends Node { + /** + * List representing the children of a node. + */ + children: Node[]; +} + +/** + * Nodes containing a value. + */ +export interface Literal extends Node { + value: unknown; +} -Additional Details - * Last updated: Thu, 14 Feb 2019 18:10:46 GMT +```` + +### Additional Details + * Last updated: Fri, 02 Jul 2021 18:04:49 GMT * Dependencies: none * Global values: none # Credits -These definitions were written by bizen241 , Jun Lu , Hernan Rajchert , Titus Wormer , Junyoung Choi . +These definitions were written by [bizen241](https://github.com/bizen241), [Jun Lu](https://github.com/lujun2), [Hernan Rajchert](https://github.com/hrajchert), [Titus Wormer](https://github.com/wooorm), and [Junyoung Choi](https://github.com/rokt33r). 
diff --git a/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/package.json b/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/package.json old mode 100644 new mode 100755 index 78fa62811fa768..d4d18b7cbe9b59 --- a/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/package.json +++ b/tools/node_modules/eslint-plugin-markdown/node_modules/@types/unist/package.json @@ -1,7 +1,8 @@ { "name": "@types/unist", - "version": "2.0.3", - "description": "TypeScript definitions for non-npm package Unist", + "version": "2.0.4", + "description": "TypeScript definitions for Unist", + "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/unist", "license": "MIT", "contributors": [ { @@ -31,13 +32,14 @@ } ], "main": "", - "types": "index", + "types": "index.d.ts", "repository": { "type": "git", - "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git" + "url": "https://github.com/DefinitelyTyped/DefinitelyTyped.git", + "directory": "types/unist" }, "scripts": {}, "dependencies": {}, - "typesPublisherContentHash": "555fe20f164ccded02a3f69d8b45c8c9d2ec6fd53844a7c7858a3001c281bc9b", - "typeScriptVersion": "3.0" + "typesPublisherContentHash": "373f3a8c09fdf9fa50470b9d6b720dbe014e0fe93cb797a34481c4231e8fab59", + "typeScriptVersion": "3.6" } \ No newline at end of file diff --git a/tools/node_modules/eslint/README.md b/tools/node_modules/eslint/README.md index 2202900d1940fc..c87d07e0dcd1ac 100644 --- a/tools/node_modules/eslint/README.md +++ b/tools/node_modules/eslint/README.md @@ -268,6 +268,11 @@ Anix
YeonJuan + + +
+Nitin Kumar +
diff --git a/tools/node_modules/eslint/lib/config/default-config.js b/tools/node_modules/eslint/lib/config/default-config.js new file mode 100644 index 00000000000000..cb6f403380dfe1 --- /dev/null +++ b/tools/node_modules/eslint/lib/config/default-config.js @@ -0,0 +1,52 @@ +/** + * @fileoverview Default configuration + * @author Nicholas C. Zakas + */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Requirements +//----------------------------------------------------------------------------- + +const Rules = require("../rules"); + +//----------------------------------------------------------------------------- +// Helpers +//----------------------------------------------------------------------------- + + +exports.defaultConfig = [ + { + plugins: { + "@": { + parsers: { + espree: require("espree") + }, + + /* + * Because we try to delay loading rules until absolutely + * necessary, a proxy allows us to hook into the lazy-loading + * aspect of the rules map while still keeping all of the + * relevant configuration inside of the config array. + */ + rules: new Proxy({}, { + get(target, property) { + return Rules.get(property); + }, + + has(target, property) { + return Rules.has(property); + } + }) + } + }, + ignores: [ + "**/node_modules/**", + ".git/**" + ], + languageOptions: { + parser: "@/espree" + } + } +]; diff --git a/tools/node_modules/eslint/lib/config/flat-config-array.js b/tools/node_modules/eslint/lib/config/flat-config-array.js new file mode 100644 index 00000000000000..ecf396a3314c18 --- /dev/null +++ b/tools/node_modules/eslint/lib/config/flat-config-array.js @@ -0,0 +1,125 @@ +/** + * @fileoverview Flat Config Array + * @author Nicholas C. 
Zakas + */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Requirements +//----------------------------------------------------------------------------- + +const { ConfigArray, ConfigArraySymbol } = require("@humanwhocodes/config-array"); +const { flatConfigSchema } = require("./flat-config-schema"); +const { RuleValidator } = require("./rule-validator"); +const { defaultConfig } = require("./default-config"); +const recommendedConfig = require("../../conf/eslint-recommended"); +const allConfig = require("../../conf/eslint-all"); + +//----------------------------------------------------------------------------- +// Helpers +//----------------------------------------------------------------------------- + +const ruleValidator = new RuleValidator(); + +/** + * Splits a plugin identifier in the form a/b/c into two parts: a/b and c. + * @param {string} identifier The identifier to parse. + * @returns {{objectName: string, pluginName: string}} The parts of the plugin + * name. + */ +function splitPluginIdentifier(identifier) { + const parts = identifier.split("/"); + + return { + objectName: parts.pop(), + pluginName: parts.join("/") + }; +} + +//----------------------------------------------------------------------------- +// Exports +//----------------------------------------------------------------------------- + +/** + * Represents an array containing configuration information for ESLint. + */ +class FlatConfigArray extends ConfigArray { + + /** + * Creates a new instance. + * @param {*[]} configs An array of configuration information. + * @param {{basePath: string, baseConfig: FlatConfig}} options The options + * to use for the config array instance. 
+ */ + constructor(configs, { basePath, baseConfig = defaultConfig }) { + super(configs, { + basePath, + schema: flatConfigSchema + }); + + this.unshift(baseConfig); + } + + /* eslint-disable class-methods-use-this */ + /** + * Replaces a config with another config to allow us to put strings + * in the config array that will be replaced by objects before + * normalization. + * @param {Object} config The config to preprocess. + * @returns {Object} The preprocessed config. + */ + [ConfigArraySymbol.preprocessConfig](config) { + if (config === "eslint:recommended") { + return recommendedConfig; + } + + if (config === "eslint:all") { + return allConfig; + } + + return config; + } + + /** + * Finalizes the config by replacing plugin references with their objects + * and validating rule option schemas. + * @param {Object} config The config to finalize. + * @returns {Object} The finalized config. + * @throws {TypeError} If the config is invalid. + */ + [ConfigArraySymbol.finalizeConfig](config) { + + const { plugins, languageOptions, processor } = config; + + // Check parser value + if (languageOptions && languageOptions.parser && typeof languageOptions.parser === "string") { + const { pluginName, objectName: parserName } = splitPluginIdentifier(languageOptions.parser); + + if (!plugins || !plugins[pluginName] || !plugins[pluginName].parsers || !plugins[pluginName].parsers[parserName]) { + throw new TypeError(`Key "parser": Could not find "${parserName}" in plugin "${pluginName}".`); + } + + languageOptions.parser = plugins[pluginName].parsers[parserName]; + } + + // Check processor value + if (processor && typeof processor === "string") { + const { pluginName, objectName: processorName } = splitPluginIdentifier(processor); + + if (!plugins || !plugins[pluginName] || !plugins[pluginName].processors || !plugins[pluginName].processors[processorName]) { + throw new TypeError(`Key "processor": Could not find "${processorName}" in plugin "${pluginName}".`); + } + + 
config.processor = plugins[pluginName].processors[processorName]; + } + + ruleValidator.validate(config); + + return config; + } + /* eslint-enable class-methods-use-this */ + +} + +exports.FlatConfigArray = FlatConfigArray; diff --git a/tools/node_modules/eslint/lib/config/flat-config-schema.js b/tools/node_modules/eslint/lib/config/flat-config-schema.js new file mode 100644 index 00000000000000..8078547613352c --- /dev/null +++ b/tools/node_modules/eslint/lib/config/flat-config-schema.js @@ -0,0 +1,452 @@ +/** + * @fileoverview Flat config schema + * @author Nicholas C. Zakas + */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Type Definitions +//----------------------------------------------------------------------------- + +/** + * @typedef ObjectPropertySchema + * @property {Function|string} merge The function or name of the function to call + * to merge multiple objects with this property. + * @property {Function|string} validate The function or name of the function to call + * to validate the value of this property. + */ + +//----------------------------------------------------------------------------- +// Helpers +//----------------------------------------------------------------------------- + +const ruleSeverities = new Map([ + [0, 0], ["off", 0], + [1, 1], ["warn", 1], + [2, 2], ["error", 2] +]); + +const globalVariablesValues = new Set([ + true, "true", "writable", "writeable", + false, "false", "readonly", "readable", null, + "off" +]); + +/** + * Check if a value is a non-null object. + * @param {any} value The value to check. + * @returns {boolean} `true` if the value is a non-null object. + */ +function isNonNullObject(value) { + return typeof value === "object" && value !== null; +} + +/** + * Check if a value is undefined. + * @param {any} value The value to check. + * @returns {boolean} `true` if the value is undefined. 
+ */ +function isUndefined(value) { + return typeof value === "undefined"; +} + +/** + * Deeply merges two objects. + * @param {Object} first The base object. + * @param {Object} second The overrides object. + * @returns {Object} An object with properties from both first and second. + */ +function deepMerge(first = {}, second = {}) { + + /* + * If the second value is an array, just return it. We don't merge + * arrays because order matters and we can't know the correct order. + */ + if (Array.isArray(second)) { + return second; + } + + /* + * First create a result object where properties from the second object + * overwrite properties from the first. This sets up a baseline to use + * later rather than needing to inspect and change every property + * individually. + */ + const result = { + ...first, + ...second + }; + + for (const key of Object.keys(second)) { + + // avoid hairy edge case + if (key === "__proto__") { + continue; + } + + const firstValue = first[key]; + const secondValue = second[key]; + + if (isNonNullObject(firstValue)) { + result[key] = deepMerge(firstValue, secondValue); + } else if (isUndefined(firstValue)) { + if (isNonNullObject(secondValue)) { + result[key] = deepMerge( + Array.isArray(secondValue) ? [] : {}, + secondValue + ); + } else if (!isUndefined(secondValue)) { + result[key] = secondValue; + } + } + } + + return result; + +} + +/** + * Normalizes the rule options config for a given rule by ensuring that + * it is an array and that the first item is 0, 1, or 2. + * @param {Array|string|number} ruleOptions The rule options config. + * @returns {Array} An array of rule options. + */ +function normalizeRuleOptions(ruleOptions) { + + const finalOptions = Array.isArray(ruleOptions) + ? 
ruleOptions.slice(0) + : [ruleOptions]; + + finalOptions[0] = ruleSeverities.get(finalOptions[0]); + return finalOptions; +} + +//----------------------------------------------------------------------------- +// Assertions +//----------------------------------------------------------------------------- + +/** + * Validates that a value is a valid rule options entry. + * @param {any} value The value to check. + * @returns {void} + * @throws {TypeError} If the value isn't a valid rule options. + */ +function assertIsRuleOptions(value) { + + if (typeof value !== "string" && typeof value !== "number" && !Array.isArray(value)) { + throw new TypeError("Expected a string, number, or array."); + } +} + +/** + * Validates that a value is valid rule severity. + * @param {any} value The value to check. + * @returns {void} + * @throws {TypeError} If the value isn't a valid rule severity. + */ +function assertIsRuleSeverity(value) { + const severity = typeof value === "string" + ? ruleSeverities.get(value.toLowerCase()) + : ruleSeverities.get(value); + + if (typeof severity === "undefined") { + throw new TypeError("Expected severity of \"off\", 0, \"warn\", 1, \"error\", or 2."); + } +} + +/** + * Validates that a given string is the form pluginName/objectName. + * @param {string} value The string to check. + * @returns {void} + * @throws {TypeError} If the string isn't in the correct format. + */ +function assertIsPluginMemberName(value) { + if (!/[@a-z0-9-_$]+(?:\/(?:[a-z0-9-_$]+))+$/iu.test(value)) { + throw new TypeError(`Expected string in the form "pluginName/objectName" but found "${value}".`); + } +} + +/** + * Validates that a value is an object. + * @param {any} value The value to check. + * @returns {void} + * @throws {TypeError} If the value isn't an object. + */ +function assertIsObject(value) { + if (!isNonNullObject(value)) { + throw new TypeError("Expected an object."); + } +} + +/** + * Validates that a value is an object or a string. 
+ * @param {any} value The value to check. + * @returns {void} + * @throws {TypeError} If the value isn't an object or a string. + */ +function assertIsObjectOrString(value) { + if ((!value || typeof value !== "object") && typeof value !== "string") { + throw new TypeError("Expected an object or string."); + } +} + +//----------------------------------------------------------------------------- +// Low-Level Schemas +//----------------------------------------------------------------------------- + + +/** @type {ObjectPropertySchema} */ +const numberSchema = { + merge: "replace", + validate: "number" +}; + +/** @type {ObjectPropertySchema} */ +const booleanSchema = { + merge: "replace", + validate: "boolean" +}; + +/** @type {ObjectPropertySchema} */ +const deepObjectAssignSchema = { + merge(first = {}, second = {}) { + return deepMerge(first, second); + }, + validate: "object" +}; + +//----------------------------------------------------------------------------- +// High-Level Schemas +//----------------------------------------------------------------------------- + +/** @type {ObjectPropertySchema} */ +const globalsSchema = { + merge: "assign", + validate(value) { + + assertIsObject(value); + + for (const key of Object.keys(value)) { + + // avoid hairy edge case + if (key === "__proto__") { + continue; + } + + if (key !== key.trim()) { + throw new TypeError(`Global "${key}" has leading or trailing whitespace.`); + } + + if (!globalVariablesValues.has(value[key])) { + throw new TypeError(`Key "${key}": Expected "readonly", "writable", or "off".`); + } + } + } +}; + +/** @type {ObjectPropertySchema} */ +const parserSchema = { + merge: "replace", + validate(value) { + assertIsObjectOrString(value); + + if (typeof value === "object" && typeof value.parse !== "function" && typeof value.parseForESLint !== "function") { + throw new TypeError("Expected object to have a parse() or parseForESLint() method."); + } + + if (typeof value === "string") { + 
assertIsPluginMemberName(value); + } + } +}; + +/** @type {ObjectPropertySchema} */ +const pluginsSchema = { + merge(first = {}, second = {}) { + const keys = new Set([...Object.keys(first), ...Object.keys(second)]); + const result = {}; + + // manually validate that plugins are not redefined + for (const key of keys) { + + // avoid hairy edge case + if (key === "__proto__") { + continue; + } + + if (key in first && key in second && first[key] !== second[key]) { + throw new TypeError(`Cannot redefine plugin "${key}".`); + } + + result[key] = second[key] || first[key]; + } + + return result; + }, + validate(value) { + + // first check the value to be sure it's an object + if (value === null || typeof value !== "object") { + throw new TypeError("Expected an object."); + } + + // second check the keys to make sure they are objects + for (const key of Object.keys(value)) { + + // avoid hairy edge case + if (key === "__proto__") { + continue; + } + + if (value[key] === null || typeof value[key] !== "object") { + throw new TypeError(`Key "${key}": Expected an object.`); + } + } + } +}; + +/** @type {ObjectPropertySchema} */ +const processorSchema = { + merge: "replace", + validate(value) { + if (typeof value === "string") { + assertIsPluginMemberName(value); + } else if (value && typeof value === "object") { + if (typeof value.preprocess !== "function" || typeof value.postprocess !== "function") { + throw new TypeError("Object must have a preprocess() and a postprocess() method."); + } + } else { + throw new TypeError("Expected an object or a string."); + } + } +}; + +/** @type {ObjectPropertySchema} */ +const rulesSchema = { + merge(first = {}, second = {}) { + + const result = { + ...first, + ...second + }; + + for (const ruleId of Object.keys(result)) { + + // avoid hairy edge case + if (ruleId === "__proto__") { + + /* eslint-disable-next-line no-proto */ + delete result.__proto__; + continue; + } + + result[ruleId] = normalizeRuleOptions(result[ruleId]); + + /* + * 
If either rule config is missing, then the correct + * config is already present and we just need to normalize + * the severity. + */ + if (!(ruleId in first) || !(ruleId in second)) { + continue; + } + + const firstRuleOptions = normalizeRuleOptions(first[ruleId]); + const secondRuleOptions = normalizeRuleOptions(second[ruleId]); + + /* + * If the second rule config only has a severity (length of 1), + * then use that severity and keep the rest of the options from + * the first rule config. + */ + if (secondRuleOptions.length === 1) { + result[ruleId] = [secondRuleOptions[0], ...firstRuleOptions.slice(1)]; + continue; + } + + /* + * In any other situation, then the second rule config takes + * precedence. That means the value at `result[ruleId]` is + * already correct and no further work is necessary. + */ + } + + return result; + }, + + validate(value) { + assertIsObject(value); + + let lastRuleId; + + // Performance: One try-catch has less overhead than one per loop iteration + try { + + /* + * We are not checking the rule schema here because there is no + * guarantee that the rule definition is present at this point. Instead + * we wait and check the rule schema during the finalization step + * of calculating a config. 
+ */ + for (const ruleId of Object.keys(value)) { + + // avoid hairy edge case + if (ruleId === "__proto__") { + continue; + } + + lastRuleId = ruleId; + + const ruleOptions = value[ruleId]; + + assertIsRuleOptions(ruleOptions); + + if (Array.isArray(ruleOptions)) { + assertIsRuleSeverity(ruleOptions[0]); + } else { + assertIsRuleSeverity(ruleOptions); + } + } + } catch (error) { + error.message = `Key "${lastRuleId}": ${error.message}`; + throw error; + } + } +}; + +/** @type {ObjectPropertySchema} */ +const sourceTypeSchema = { + merge: "replace", + validate(value) { + if (typeof value !== "string" || !/^(?:script|module|commonjs)$/u.test(value)) { + throw new TypeError("Expected \"script\", \"module\", or \"commonjs\"."); + } + } +}; + +//----------------------------------------------------------------------------- +// Full schema +//----------------------------------------------------------------------------- + +exports.flatConfigSchema = { + settings: deepObjectAssignSchema, + linterOptions: { + schema: { + noInlineConfig: booleanSchema, + reportUnusedDisableDirectives: booleanSchema + } + }, + languageOptions: { + schema: { + ecmaVersion: numberSchema, + sourceType: sourceTypeSchema, + globals: globalsSchema, + parser: parserSchema, + parserOptions: deepObjectAssignSchema + } + }, + processor: processorSchema, + plugins: pluginsSchema, + rules: rulesSchema +}; diff --git a/tools/node_modules/eslint/lib/config/rule-validator.js b/tools/node_modules/eslint/lib/config/rule-validator.js new file mode 100644 index 00000000000000..f162dd81a05095 --- /dev/null +++ b/tools/node_modules/eslint/lib/config/rule-validator.js @@ -0,0 +1,169 @@ +/** + * @fileoverview Rule Validator + * @author Nicholas C. 
Zakas + */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Requirements +//----------------------------------------------------------------------------- + +const ajv = require("../shared/ajv")(); + +//----------------------------------------------------------------------------- +// Helpers +//----------------------------------------------------------------------------- + +/** + * Finds a rule with the given ID in the given config. + * @param {string} ruleId The ID of the rule to find. + * @param {Object} config The config to search in. + * @returns {{create: Function, schema: (Array|null)}} THe rule object. + */ +function findRuleDefinition(ruleId, config) { + const ruleIdParts = ruleId.split("/"); + let pluginName, ruleName; + + // built-in rule + if (ruleIdParts.length === 1) { + pluginName = "@"; + ruleName = ruleIdParts[0]; + } else { + ruleName = ruleIdParts.pop(); + pluginName = ruleIdParts.join("/"); + } + + if (!config.plugins || !config.plugins[pluginName]) { + throw new TypeError(`Key "rules": Key "${ruleId}": Could not find plugin "${pluginName}".`); + } + + if (!config.plugins[pluginName].rules || !config.plugins[pluginName].rules[ruleName]) { + throw new TypeError(`Key "rules": Key "${ruleId}": Could not find "${ruleName}" in plugin "${pluginName}".`); + } + + return config.plugins[pluginName].rules[ruleName]; + +} + +/** + * Gets a complete options schema for a rule. + * @param {{create: Function, schema: (Array|null)}} rule A new-style rule object + * @returns {Object} JSON Schema for the rule's options. 
+ */ +function getRuleOptionsSchema(rule) { + + if (!rule) { + return null; + } + + const schema = rule.schema || rule.meta && rule.meta.schema; + + if (Array.isArray(schema)) { + if (schema.length) { + return { + type: "array", + items: schema, + minItems: 0, + maxItems: schema.length + }; + } + return { + type: "array", + minItems: 0, + maxItems: 0 + }; + + } + + // Given a full schema, leave it alone + return schema || null; +} + +//----------------------------------------------------------------------------- +// Exports +//----------------------------------------------------------------------------- + +/** + * Implements validation functionality for the rules portion of a config. + */ +class RuleValidator { + + /** + * Creates a new instance. + */ + constructor() { + + /** + * A collection of compiled validators for rules that have already + * been validated. + * @type {WeakMap} + * @property validators + */ + this.validators = new WeakMap(); + } + + /** + * Validates all of the rule configurations in a config against each + * rule's schema. + * @param {Object} config The full config to validate. This object must + * contain both the rules section and the plugins section. + * @returns {void} + * @throws {Error} If a rule's configuration does not match its schema. + */ + validate(config) { + + if (!config.rules) { + return; + } + + for (const [ruleId, ruleOptions] of Object.entries(config.rules)) { + + // check for edge case + if (ruleId === "__proto__") { + continue; + } + + /* + * If a rule is disabled, we don't do any validation. This allows + * users to safely set any value to 0 or "off" without worrying + * that it will cause a validation error. + * + * Note: ruleOptions is always an array at this point because + * this validation occurs after FlatConfigArray has merged and + * normalized values. 
+ */ + if (ruleOptions[0] === 0) { + continue; + } + + const rule = findRuleDefinition(ruleId, config); + + // Precompile and cache validator the first time + if (!this.validators.has(rule)) { + const schema = getRuleOptionsSchema(rule); + + if (schema) { + this.validators.set(rule, ajv.compile(schema)); + } + } + + const validateRule = this.validators.get(rule); + + if (validateRule) { + + validateRule(ruleOptions.slice(1)); + + if (validateRule.errors) { + throw new Error(`Key "rules": Key "${ruleId}": ${ + validateRule.errors.map( + error => `\tValue ${JSON.stringify(error.data)} ${error.message}.\n` + ).join("") + }`); + } + } + } + } +} + +exports.RuleValidator = RuleValidator; diff --git a/tools/node_modules/eslint/lib/linter/linter.js b/tools/node_modules/eslint/lib/linter/linter.js index e94b507b5dd30a..4e80926a895ec6 100644 --- a/tools/node_modules/eslint/lib/linter/linter.js +++ b/tools/node_modules/eslint/lib/linter/linter.js @@ -37,8 +37,10 @@ const const debug = require("debug")("eslint:linter"); const MAX_AUTOFIX_PASSES = 10; const DEFAULT_PARSER_NAME = "espree"; +const DEFAULT_ECMA_VERSION = 5; const commentParser = new ConfigCommentParser(); const DEFAULT_ERROR_LOC = { start: { line: 1, column: 0 }, end: { line: 1, column: 1 } }; +const parserSymbol = Symbol.for("eslint.RuleTester.parser"); //------------------------------------------------------------------------------ // Typedefs @@ -432,10 +434,16 @@ function getDirectiveComments(filename, ast, ruleMapper, warnInlineConfig) { /** * Normalize ECMAScript version from the initial config - * @param {number} ecmaVersion ECMAScript version from the initial config + * @param {Parser} parser The parser which uses this options. 
+ * @param {number} ecmaVersion ECMAScript version from the initial config * @returns {number} normalized ECMAScript version */ -function normalizeEcmaVersion(ecmaVersion) { +function normalizeEcmaVersion(parser, ecmaVersion) { + if ((parser[parserSymbol] || parser) === espree) { + if (ecmaVersion === "latest") { + return espree.latestEcmaVersion; + } + } /* * Calculate ECMAScript edition number from official year version starting with @@ -521,12 +529,13 @@ function normalizeVerifyOptions(providedOptions, config) { /** * Combines the provided parserOptions with the options from environments - * @param {string} parserName The parser name which uses this options. + * @param {Parser} parser The parser which uses this options. * @param {ParserOptions} providedOptions The provided 'parserOptions' key in a config * @param {Environment[]} enabledEnvironments The environments enabled in configuration and with inline comments * @returns {ParserOptions} Resulting parser options after merge */ -function resolveParserOptions(parserName, providedOptions, enabledEnvironments) { +function resolveParserOptions(parser, providedOptions, enabledEnvironments) { + const parserOptionsFromEnv = enabledEnvironments .filter(env => env.parserOptions) .reduce((parserOptions, env) => merge(parserOptions, env.parserOptions), {}); @@ -542,12 +551,7 @@ function resolveParserOptions(parserName, providedOptions, enabledEnvironments) mergedParserOptions.ecmaFeatures = Object.assign({}, mergedParserOptions.ecmaFeatures, { globalReturn: false }); } - /* - * TODO: @aladdin-add - * 1. for a 3rd-party parser, do not normalize parserOptions - * 2. 
for espree, no need to do this (espree will do it) - */ - mergedParserOptions.ecmaVersion = normalizeEcmaVersion(mergedParserOptions.ecmaVersion); + mergedParserOptions.ecmaVersion = normalizeEcmaVersion(parser, mergedParserOptions.ecmaVersion); return mergedParserOptions; } @@ -606,7 +610,7 @@ function getRuleOptions(ruleConfig) { */ function analyzeScope(ast, parserOptions, visitorKeys) { const ecmaFeatures = parserOptions.ecmaFeatures || {}; - const ecmaVersion = parserOptions.ecmaVersion || 5; + const ecmaVersion = parserOptions.ecmaVersion || DEFAULT_ECMA_VERSION; return eslintScope.analyze(ast, { ignoreEval: true, @@ -1123,7 +1127,7 @@ class Linter { .map(envName => getEnv(slots, envName)) .filter(env => env); - const parserOptions = resolveParserOptions(parserName, config.parserOptions || {}, enabledEnvs); + const parserOptions = resolveParserOptions(parser, config.parserOptions || {}, enabledEnvs); const configuredGlobals = resolveGlobals(config.globals || {}, enabledEnvs); const settings = config.settings || {}; diff --git a/tools/node_modules/eslint/lib/rule-tester/rule-tester.js b/tools/node_modules/eslint/lib/rule-tester/rule-tester.js index cac81bc71d150f..2b5524923bea7c 100644 --- a/tools/node_modules/eslint/lib/rule-tester/rule-tester.js +++ b/tools/node_modules/eslint/lib/rule-tester/rule-tester.js @@ -53,6 +53,7 @@ const const ajv = require("../shared/ajv")({ strictDefaults: true }); const espreePath = require.resolve("espree"); +const parserSymbol = Symbol.for("eslint.RuleTester.parser"); //------------------------------------------------------------------------------ // Typedefs @@ -239,6 +240,7 @@ function defineStartEndAsError(objName, node) { }); } + /** * Define `start`/`end` properties of all nodes of the given AST as throwing error. * @param {ASTNode} ast The root node to errorize `start`/`end` properties. @@ -258,8 +260,10 @@ function defineStartEndAsErrorInTree(ast, visitorKeys) { * @returns {Parser} Wrapped parser object. 
*/ function wrapParser(parser) { + if (typeof parser.parseForESLint === "function") { return { + [parserSymbol]: parser, parseForESLint(...args) { const ret = parser.parseForESLint(...args); @@ -268,7 +272,9 @@ function wrapParser(parser) { } }; } + return { + [parserSymbol]: parser, parse(...args) { const ast = parser.parse(...args); diff --git a/tools/node_modules/eslint/lib/rules/dot-notation.js b/tools/node_modules/eslint/lib/rules/dot-notation.js index 751b4628edc4d5..3aa9f3110f55f1 100644 --- a/tools/node_modules/eslint/lib/rules/dot-notation.js +++ b/tools/node_modules/eslint/lib/rules/dot-notation.js @@ -94,7 +94,7 @@ module.exports = { // Don't perform any fixes if there are comments inside the brackets. if (sourceCode.commentsExistBetween(leftBracket, rightBracket)) { - return; // eslint-disable-line eslint-plugin/fixer-return -- false positive + return; } // Replace the brackets by an identifier. @@ -154,12 +154,12 @@ module.exports = { // A statement that starts with `let[` is parsed as a destructuring variable declaration, not a MemberExpression. if (node.object.type === "Identifier" && node.object.name === "let" && !node.optional) { - return; // eslint-disable-line eslint-plugin/fixer-return -- false positive + return; } // Don't perform any fixes if there are comments between the dot and the property name. if (sourceCode.commentsExistBetween(dotToken, node.property)) { - return; // eslint-disable-line eslint-plugin/fixer-return -- false positive + return; } // Replace the identifier to brackets. 
diff --git a/tools/node_modules/eslint/lib/rules/prefer-arrow-callback.js b/tools/node_modules/eslint/lib/rules/prefer-arrow-callback.js index ee5cfe3c8c7fd9..a01c0340821b20 100644 --- a/tools/node_modules/eslint/lib/rules/prefer-arrow-callback.js +++ b/tools/node_modules/eslint/lib/rules/prefer-arrow-callback.js @@ -295,7 +295,7 @@ module.exports = { * If the callback function has duplicates in its list of parameters (possible in sloppy mode), * don't replace it with an arrow function, because this is a SyntaxError with arrow functions. */ - return; // eslint-disable-line eslint-plugin/fixer-return -- false positive + return; } // Remove `.bind(this)` if exists. @@ -307,7 +307,7 @@ module.exports = { * E.g. `(foo || function(){}).bind(this)` */ if (memberNode.type !== "MemberExpression") { - return; // eslint-disable-line eslint-plugin/fixer-return -- false positive + return; } const callNode = memberNode.parent; @@ -320,12 +320,12 @@ module.exports = { * ^^^^^^^^^^^^ */ if (astUtils.isParenthesised(sourceCode, memberNode)) { - return; // eslint-disable-line eslint-plugin/fixer-return -- false positive + return; } // If comments exist in the `.bind(this)`, don't remove those. if (sourceCode.commentsExistBetween(firstTokenToRemove, lastTokenToRemove)) { - return; // eslint-disable-line eslint-plugin/fixer-return -- false positive + return; } yield fixer.removeRange([firstTokenToRemove.range[0], lastTokenToRemove.range[1]]); diff --git a/tools/node_modules/eslint/lib/rules/use-isnan.js b/tools/node_modules/eslint/lib/rules/use-isnan.js index 0c7e888c976d8d..ef95b21314a1ce 100644 --- a/tools/node_modules/eslint/lib/rules/use-isnan.js +++ b/tools/node_modules/eslint/lib/rules/use-isnan.js @@ -21,7 +21,10 @@ const astUtils = require("./utils/ast-utils"); * @returns {boolean} `true` if the node is 'NaN' identifier. 
*/ function isNaNIdentifier(node) { - return Boolean(node) && node.type === "Identifier" && node.name === "NaN"; + return Boolean(node) && ( + astUtils.isSpecificId(node, "NaN") || + astUtils.isSpecificMemberAccess(node, "Number", "NaN") + ); } //------------------------------------------------------------------------------ diff --git a/tools/node_modules/eslint/lib/source-code/source-code.js b/tools/node_modules/eslint/lib/source-code/source-code.js index c13ce29b877a4a..cc4524fa74c806 100644 --- a/tools/node_modules/eslint/lib/source-code/source-code.js +++ b/tools/node_modules/eslint/lib/source-code/source-code.js @@ -349,7 +349,7 @@ class SourceCode extends TokenStore { let currentToken = this.getTokenBefore(node, { includeComments: true }); while (currentToken && isCommentToken(currentToken)) { - if (node.parent && (currentToken.start < node.parent.start)) { + if (node.parent && node.parent.type !== "Program" && (currentToken.start < node.parent.start)) { break; } comments.leading.push(currentToken); @@ -361,7 +361,7 @@ class SourceCode extends TokenStore { currentToken = this.getTokenAfter(node, { includeComments: true }); while (currentToken && isCommentToken(currentToken)) { - if (node.parent && (currentToken.end > node.parent.end)) { + if (node.parent && node.parent.type !== "Program" && (currentToken.end > node.parent.end)) { break; } comments.trailing.push(currentToken); diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/LICENSE b/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/LICENSE new file mode 100644 index 00000000000000..261eeb9e9f8b2b --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/README.md b/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/README.md new file mode 100644 index 00000000000000..9cfe637bb3d2f7 --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/README.md @@ -0,0 +1,258 @@ +# Config Array + +by [Nicholas C. Zakas](https://humanwhocodes.com) + +If you find this useful, please consider supporting my work with a [donation](https://humanwhocodes.com/donate). + +## Description + +A config array is a way of managing configurations that are based on glob pattern matching of filenames. Each config array contains the information needed to determine the correct configuration for any file based on the filename. 
+ +## Background + +In 2019, I submitted an [ESLint RFC](https://github.com/eslint/rfcs/pull/9) proposing a new way of configuring ESLint. The goal was to streamline what had become an increasingly complicated configuration process. Over several iterations, this proposal was eventually born. + +The basic idea is that all configuration, including overrides, can be represented by a single array where each item in the array is a config object. Config objects appearing later in the array override config objects appearing earlier in the array. You can calculate a config for a given file by traversing all config objects in the array to find the ones that match the filename. Matching is done by specifying glob patterns in `files` and `ignores` properties on each config object. Here's an example: + +```js +export default [ + + // match all JSON files + { + name: "JSON Handler", + files: ["**/*.json"], + handler: jsonHandler + }, + + // match only package.json + { + name: "package.json Handler", + files: ["package.json"], + handler: packageJsonHandler + } +]; +``` + +In this example, there are two config objects: the first matches all JSON files in all directories and the second matches just `package.json` in the base path directory (all the globs are evaluated as relative to a base path that can be specified). When you retrieve a configuration for `foo.json`, only the first config object matches so `handler` is equal to `jsonHandler`; when you retrieve a configuration for `package.json`, `handler` is equal to `packageJsonHandler` (because both config objects match, the second one wins). 
+ +## Installation + +You can install the package using npm or Yarn: + +```bash +npm install @humanwhocodes/config-array --save + +# or + +yarn add @humanwhocodes/config-array +``` + +## Usage + +First, import the `ConfigArray` constructor: + +```js +import { ConfigArray } from "@humanwhocodes/config-array"; + +// or using CommonJS + +const { ConfigArray } = require("@humanwhocodes/config-array"); +``` + +When you create a new instance of `ConfigArray`, you must pass in two arguments: an array of configs and an options object. The array of configs is most likely read in from a configuration file, so here's a typical example: + +```js +const configFilename = path.resolve(process.cwd(), "my.config.js"); +const { default: rawConfigs } = await import(configFilename); +const configs = new ConfigArray(rawConfigs, { + + // the path to match filenames from + basePath: process.cwd(), + + // additional items in each config + schema: mySchema +}); +``` + +This example reads in an object or array from `my.config.js` and passes it into the `ConfigArray` constructor as the first argument. The second argument is an object specifying the `basePath` (the directory in which `my.config.js` is found) and a `schema` to define the additional properties of a config object beyond `files`, `ignores`, and `name`. + +### Specifying a Schema + +The `schema` option is required for you to use additional properties in config objects. The schema is an object that follows the format of an [`ObjectSchema`](https://npmjs.com/package/@humanwhocodes/object-schema). The schema specifies both validation and merge rules that the `ConfigArray` instance needs to combine configs when there are multiple matches. 
Here's an example: + +```js +const configFilename = path.resolve(process.cwd(), "my.config.js"); +const { default: rawConfigs } = await import(configFilename); + +const mySchema = { + + // define the handler key in configs + handler: { + required: true, + merge(a, b) { + if (!b) return a; + if (!a) return b; + }, + validate(value) { + if (typeof value !== "function") { + throw new TypeError("Function expected."); + } + } + } +}; + +const configs = new ConfigArray(rawConfigs, { + + // the path to match filenames from + basePath: process.cwd(), + + // additional items in each config + schema: mySchema +}); +``` + +### Config Arrays + +Config arrays can be multidimensional, so it's possible for a config array to contain another config array, such as: + +```js +export default [ + + // JS config + { + files: ["**/*.js"], + handler: jsHandler + }, + + // JSON configs + [ + + // match all JSON files + { + name: "JSON Handler", + files: ["**/*.json"], + handler: jsonHandler + }, + + // match only package.json + { + name: "package.json Handler", + files: ["package.json"], + handler: packageJsonHandler + } + ], + + // filename must match function + { + files: [ filePath => filePath.endsWith(".md") ], + handler: markdownHandler + }, + + // filename must match all patterns in subarray + { + files: [ ["*.test.*", "*.js"] ], + handler: jsTestHandler + }, + + // filename must not match patterns beginning with ! + { + name: "Non-JS files", + files: ["!*.js"], + settings: { + js: false + } + } +]; +``` + +In this example, the array contains both config objects and a config array. When a config array is normalized (see details below), it is flattened so only config objects remain. However, the order of evaluation remains the same. + +If the `files` array contains a function, then that function is called with the absolute path of the file and is expected to return `true` if there is a match and `false` if not. (The `ignores` array can also contain functions.) 
+ +If the `files` array contains an item that is an array of strings and functions, then all patterns must match in order for the config to match. In the preceding examples, both `*.test.*` and `*.js` must match in order for the config object to be used. + +If a pattern in the files array begins with `!` then it excludes that pattern. In the preceding example, any filename that doesn't end with `.js` will automatically get a `settings.js` property set to `false`. + +### Config Functions + +Config arrays can also include config functions. A config function accepts a single parameter, `context` (defined by you), and must return either a config object or a config array (it cannot return another function). Config functions allow end users to execute code in the creation of appropriate config objects. Here's an example: + +```js +export default [ + + // JS config + { + files: ["**/*.js"], + handler: jsHandler + }, + + // JSON configs + function (context) { + return [ + + // match all JSON files + { + name: context.name + " JSON Handler", + files: ["**/*.json"], + handler: jsonHandler + }, + + // match only package.json + { + name: context.name + " package.json Handler", + files: ["package.json"], + handler: packageJsonHandler + } + ]; + } +]; +``` + +When a config array is normalized, each function is executed and replaced in the config array with the return value. + +**Note:** Config functions cannot be async. This will be added in a future version. + +### Normalizing Config Arrays + +Once a config array has been created and loaded with all of the raw config data, it must be normalized before it can be used. The normalization process goes through and flattens the config array as well as executing all config functions to get their final values. 
+ +To normalize a config array, call the `normalize()` method and pass in a context object: + +```js +await configs.normalize({ + name: "MyApp" +}); +``` + +The `normalize()` method returns a promise, so be sure to use the `await` operator. The config array instance is normalized in-place, so you don't need to create a new variable. + +**Important:** Once a `ConfigArray` is normalized, it cannot be changed further. You can, however, create a new `ConfigArray` and pass in the normalized instance to create an unnormalized copy. + +### Getting Config for a File + +To get the config for a file, use the `getConfig()` method on a normalized config array and pass in the filename to get a config for: + +```js +// pass in absolute filename +const fileConfig = configs.getConfig(path.resolve(process.cwd(), "package.json")); +``` + +The config array always returns an object, even if there are no configs matching the given filename. You can then inspect the returned config object to determine how to proceed. + +A few things to keep in mind: + +* You must pass in the absolute filename to get a config for. +* The returned config object never has `files`, `ignores`, or `name` properties; the only properties on the object will be the other configuration options specified. +* The config array caches configs, so subsequent calls to `getConfig()` with the same filename will result in a fast lookup rather than another calculation. 
+ +## Acknowledgements + +The design of this project was influenced by feedback on the ESLint RFC, and incorporates ideas from: + +* Teddy Katz (@not-an-aardvark) +* Toru Nagashima (@mysticatea) +* Kai Cataldo (@kaicataldo) + +## License + +Apache 2.0 diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/api.js b/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/api.js new file mode 100644 index 00000000000000..a9aacf46b5cd4b --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/api.js @@ -0,0 +1,457 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var path = _interopDefault(require('path')); + +var minimatch = _interopDefault(require('minimatch')); +var createDebug = _interopDefault(require('debug')); +var objectSchema = require('@humanwhocodes/object-schema'); + +/** + * @fileoverview ConfigSchema + * @author Nicholas C. Zakas + */ + +//------------------------------------------------------------------------------ +// Helpers +//------------------------------------------------------------------------------ + +/** + * Asserts that a given value is an array. + * @param {*} value The value to check. + * @returns {void} + * @throws {TypeError} When the value is not an array. + */ +function assertIsArray(value) { + if (!Array.isArray(value)) { + throw new TypeError('Expected value to be an array.'); + } +} + +/** + * Asserts that a given value is an array containing only strings and functions. + * @param {*} value The value to check. + * @returns {void} + * @throws {TypeError} When the value is not an array of strings and functions. 
+ */ +function assertIsArrayOfStringsAndFunctions(value, name) { + assertIsArray(value); + + if (value.some(item => typeof item !== 'string' && typeof item !== 'function')) { + throw new TypeError('Expected array to only contain strings.'); + } +} + +//------------------------------------------------------------------------------ +// Exports +//------------------------------------------------------------------------------ + +/** + * The base schema that every ConfigArray uses. + * @type Object + */ +const baseSchema = Object.freeze({ + name: { + required: false, + merge() { + return undefined; + }, + validate(value) { + if (typeof value !== 'string') { + throw new TypeError('Property must be a string.'); + } + } + }, + files: { + required: false, + merge() { + return undefined; + }, + validate(value) { + + // first check if it's an array + assertIsArray(value); + + // then check each member + value.forEach(item => { + if (Array.isArray(item)) { + assertIsArrayOfStringsAndFunctions(item); + } else if (typeof item !== 'string' && typeof item !== 'function') { + throw new TypeError('Items must be a string, a function, or an array of strings and functions.'); + } + }); + + } + }, + ignores: { + required: false, + merge() { + return undefined; + }, + validate: assertIsArrayOfStringsAndFunctions + } +}); + +/** + * @fileoverview ConfigArray + * @author Nicholas C. Zakas + */ + +//------------------------------------------------------------------------------ +// Helpers +//------------------------------------------------------------------------------ + +const debug = createDebug('@hwc/config-array'); + +const MINIMATCH_OPTIONS = { + matchBase: true +}; + +/** + * Shorthand for checking if a value is a string. + * @param {any} value The value to check. + * @returns {boolean} True if a string, false if not. 
+ */ +function isString(value) { + return typeof value === 'string'; +} + +/** + * Normalizes a `ConfigArray` by flattening it and executing any functions + * that are found inside. + * @param {Array} items The items in a `ConfigArray`. + * @param {Object} context The context object to pass into any function + * found. + * @returns {Array} A flattened array containing only config objects. + * @throws {TypeError} When a config function returns a function. + */ +async function normalize(items, context) { + + // TODO: Allow async config functions + + function *flatTraverse(array) { + for (let item of array) { + if (typeof item === 'function') { + item = item(context); + } + + if (Array.isArray(item)) { + yield * flatTraverse(item); + } else if (typeof item === 'function') { + throw new TypeError('A config function can only return an object or array.'); + } else { + yield item; + } + } + } + + return [...flatTraverse(items)]; +} + +/** + * Determines if a given file path is matched by a config. If the config + * has no `files` field, then it matches; otherwise, if a `files` field + * is present then we match the globs in `files` and exclude any globs in + * `ignores`. + * @param {string} filePath The absolute file path to check. + * @param {Object} config The config object to check. + * @returns {boolean} True if the file path is matched by the config, + * false if not. 
+ */ +function pathMatches(filePath, basePath, config) { + + // a config without a `files` field always matches + if (!config.files) { + return true; + } + + // if files isn't an array, throw an error + if (!Array.isArray(config.files) || config.files.length === 0) { + throw new TypeError('The files key must be a non-empty array.'); + } + + const relativeFilePath = path.relative(basePath, filePath); + + // match both strings and functions + const match = pattern => { + if (isString(pattern)) { + return minimatch(relativeFilePath, pattern, MINIMATCH_OPTIONS); + } + + if (typeof pattern === 'function') { + return pattern(filePath); + } + }; + + // check for all matches to config.files + let matches = config.files.some(pattern => { + if (Array.isArray(pattern)) { + return pattern.every(match); + } + + return match(pattern); + }); + + /* + * If the file path matches the config.files patterns, then check to see + * if there are any files to ignore. + */ + if (matches && config.ignores) { + matches = !config.ignores.some(pattern => { + return minimatch(filePath, pattern, MINIMATCH_OPTIONS); + }); + } + + return matches; +} + +/** + * Ensures that a ConfigArray has been normalized. + * @param {ConfigArray} configArray The ConfigArray to check. + * @returns {void} + * @throws {Error} When the `ConfigArray` is not normalized. 
+ */ +function assertNormalized(configArray) { + // TODO: Throw more verbose error + if (!configArray.isNormalized()) { + throw new Error('ConfigArray must be normalized to perform this operation.'); + } +} + +//------------------------------------------------------------------------------ +// Public Interface +//------------------------------------------------------------------------------ + +const ConfigArraySymbol = { + isNormalized: Symbol('isNormalized'), + configCache: Symbol('configCache'), + schema: Symbol('schema'), + finalizeConfig: Symbol('finalizeConfig'), + preprocessConfig: Symbol('preprocessConfig') +}; + +/** + * Represents an array of config objects and provides method for working with + * those config objects. + */ +class ConfigArray extends Array { + + /** + * Creates a new instance of ConfigArray. + * @param {Iterable|Function|Object} configs An iterable yielding config + * objects, or a config function, or a config object. + * @param {string} [options.basePath=""] The path of the config file + * @param {boolean} [options.normalized=false] Flag indicating if the + * configs have already been normalized. + * @param {Object} [options.schema] The additional schema + * definitions to use for the ConfigArray schema. + */ + constructor(configs, { basePath = '', normalized = false, schema: customSchema } = {}) { + super(); + + /** + * Tracks if the array has been normalized. + * @property isNormalized + * @type boolean + * @private + */ + this[ConfigArraySymbol.isNormalized] = normalized; + + /** + * The schema used for validating and merging configs. + * @property schema + * @type ObjectSchema + * @private + */ + this[ConfigArraySymbol.schema] = new objectSchema.ObjectSchema({ + ...customSchema, + ...baseSchema + }); + + /** + * The path of the config file that this array was loaded from. + * This is used to calculate filename matches. 
+ * @property basePath + * @type string + */ + this.basePath = basePath; + + /** + * A cache to store calculated configs for faster repeat lookup. + * @property configCache + * @type Map + * @private + */ + this[ConfigArraySymbol.configCache] = new Map(); + + // load the configs into this array + if (Array.isArray(configs)) { + this.push(...configs); + } else { + this.push(configs); + } + + } + + /** + * Prevent normal array methods from creating a new `ConfigArray` instance. + * This is to ensure that methods such as `slice()` won't try to create a + * new instance of `ConfigArray` behind the scenes as doing so may throw + * an error due to the different constructor signature. + * @returns {Function} The `Array` constructor. + */ + static get [Symbol.species]() { + return Array; + } + + /** + * Returns the `files` globs from every config object in the array. + * Negated patterns (those beginning with `!`) are not returned. + * This can be used to determine which files will be matched by a + * config array or to use as a glob pattern when no patterns are provided + * for a command line interface. + * @returns {string[]} An array of string patterns. + */ + get files() { + + assertNormalized(this); + + const result = []; + + for (const config of this) { + if (config.files) { + config.files.forEach(filePattern => { + if (Array.isArray(filePattern)) { + result.push(...filePattern.filter(pattern => { + return isString(pattern) && !pattern.startsWith('!'); + })); + } else if (isString(filePattern) && !filePattern.startsWith('!')) { + result.push(filePattern); + } + }); + } + } + + return result; + } + + /** + * Returns the file globs that should always be ignored regardless of + * the matching `files` fields in any configs. This is necessary to mimic + * the behavior of things like .gitignore and .eslintignore, allowing a + * globbing operation to be faster. + * @returns {string[]} An array of string patterns to be ignored. 
+ */ + get ignores() { + + assertNormalized(this); + + const result = []; + + for (const config of this) { + if (config.ignores && !config.files) { + result.push(...config.ignores.filter(isString)); + } + } + + return result; + } + + /** + * Indicates if the config array has been normalized. + * @returns {boolean} True if the config array is normalized, false if not. + */ + isNormalized() { + return this[ConfigArraySymbol.isNormalized]; + } + + /** + * Normalizes a config array by flattening embedded arrays and executing + * config functions. + * @param {ConfigContext} context The context object for config functions. + * @returns {ConfigArray} A new ConfigArray instance that is normalized. + */ + async normalize(context = {}) { + + if (!this.isNormalized()) { + const normalizedConfigs = await normalize(this, context); + this.length = 0; + this.push(...normalizedConfigs.map(this[ConfigArraySymbol.preprocessConfig])); + this[ConfigArraySymbol.isNormalized] = true; + + // prevent further changes + Object.freeze(this); + } + + return this; + } + + /** + * Finalizes the state of a config before being cached and returned by + * `getConfig()`. Does nothing by default but is provided to be + * overridden by subclasses as necessary. + * @param {Object} config The config to finalize. + * @returns {Object} The finalized config. + */ + [ConfigArraySymbol.finalizeConfig](config) { + return config; + } + + /** + * Preprocesses a config during the normalization process. This is the + * method to override if you want to convert an array item before it is + * validated for the first time. For example, if you want to replace a + * string with an object, this is the method to override. + * @param {Object} config The config to preprocess. + * @returns {Object} The config to use in place of the argument. + */ + [ConfigArraySymbol.preprocessConfig](config) { + return config; + } + + /** + * Returns the config object for a given file path. 
+ * @param {string} filePath The complete path of a file to get a config for. + * @returns {Object} The config object for this file. + */ + getConfig(filePath) { + + assertNormalized(this); + + // first check the cache to avoid duplicate work + let finalConfig = this[ConfigArraySymbol.configCache].get(filePath); + + if (finalConfig) { + return finalConfig; + } + + // No config found in cache, so calculate a new one + + const matchingConfigs = []; + + for (const config of this) { + if (pathMatches(filePath, this.basePath, config)) { + debug(`Matching config found for ${filePath}`); + matchingConfigs.push(config); + } else { + debug(`No matching config found for ${filePath}`); + } + } + + finalConfig = matchingConfigs.reduce((result, config) => { + return this[ConfigArraySymbol.schema].merge(result, config); + }, {}, this); + + finalConfig = this[ConfigArraySymbol.finalizeConfig](finalConfig); + + this[ConfigArraySymbol.configCache].set(filePath, finalConfig); + + return finalConfig; + } + +} + +exports.ConfigArray = ConfigArray; +exports.ConfigArraySymbol = ConfigArraySymbol; diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/package.json b/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/package.json new file mode 100644 index 00000000000000..4dc3a4e5d48ab4 --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/config-array/package.json @@ -0,0 +1,61 @@ +{ + "name": "@humanwhocodes/config-array", + "version": "0.5.0", + "description": "Glob-based configuration matching.", + "author": "Nicholas C. 
Zakas", + "main": "api.js", + "files": [ + "api.js" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/humanwhocodes/config-array.git" + }, + "bugs": { + "url": "https://github.com/humanwhocodes/config-array/issues" + }, + "homepage": "https://github.com/humanwhocodes/config-array#readme", + "scripts": { + "build": "rollup -c", + "format": "nitpik", + "lint": "eslint *.config.js src/*.js tests/*.js", + "prepublish": "npm run build", + "test:coverage": "nyc --include src/*.js npm run test", + "test": "mocha -r esm tests/ --recursive" + }, + "gitHooks": { + "pre-commit": "lint-staged" + }, + "lint-staged": { + "*.js": [ + "nitpik", + "eslint --fix --ignore-pattern '!.eslintrc.js'" + ] + }, + "keywords": [ + "configuration", + "configarray", + "config file" + ], + "license": "Apache-2.0", + "engines": { + "node": ">=10.10.0" + }, + "dependencies": { + "@humanwhocodes/object-schema": "^1.2.0", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + }, + "devDependencies": { + "@nitpik/javascript": "^0.3.3", + "@nitpik/node": "0.0.5", + "chai": "^4.2.0", + "eslint": "^6.7.1", + "esm": "^3.2.25", + "lint-staged": "^10.2.8", + "mocha": "^6.1.4", + "nyc": "^14.1.1", + "rollup": "^1.12.3", + "yorkie": "^2.0.0" + } +} \ No newline at end of file diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/LICENSE b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/LICENSE new file mode 100644 index 00000000000000..a5e3ae46fdfc2b --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2019, Human Who Codes +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. 
+ +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/README.md b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/README.md new file mode 100644 index 00000000000000..2163797f8fe15a --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/README.md @@ -0,0 +1,234 @@ +# JavaScript ObjectSchema Package + +by [Nicholas C. Zakas](https://humanwhocodes.com) + +If you find this useful, please consider supporting my work with a [donation](https://humanwhocodes.com/donate). + +## Overview + +A JavaScript object merge/validation utility where you can define a different merge and validation strategy for each key. 
This is helpful when you need to validate complex data structures and then merge them in a way that is more complex than `Object.assign()`. + +## Installation + +You can install using either npm: + +``` +npm install @humanwhocodes/object-schema +``` + +Or Yarn: + +``` +yarn add @humanwhocodes/object-schema +``` + +## Usage + +Use CommonJS to get access to the `ObjectSchema` constructor: + +```js +const { ObjectSchema } = require("@humanwhocodes/object-schema"); + +const schema = new ObjectSchema({ + + // define a definition for the "downloads" key + downloads: { + required: true, + merge(value1, value2) { + return value1 + value2; + }, + validate(value) { + if (typeof value !== "number") { + throw new Error("Expected downloads to be a number."); + } + } + }, + + // define a strategy for the "versions" key + version: { + required: true, + merge(value1, value2) { + return value1.concat(value2); + }, + validate(value) { + if (!Array.isArray(value)) { + throw new Error("Expected versions to be an array."); + } + } + } +}); + +const record1 = { + downloads: 25, + versions: [ + "v1.0.0", + "v1.1.0", + "v1.2.0" + ] +}; + +const record2 = { + downloads: 125, + versions: [ + "v2.0.0", + "v2.1.0", + "v3.0.0" + ] +}; + +// make sure the records are valid +schema.validate(record1); +schema.validate(record2); + +// merge together (schema.merge() accepts any number of objects) +const result = schema.merge(record1, record2); + +// result looks like this: + +const result = { + downloads: 75, + versions: [ + "v1.0.0", + "v1.1.0", + "v1.2.0", + "v2.0.0", + "v2.1.0", + "v3.0.0" + ] +}; +``` + +## Tips and Tricks + +### Named merge strategies + +Instead of specifying a `merge()` method, you can specify one of the following strings to use a default merge strategy: + +* `"assign"` - use `Object.assign()` to merge the two values into one object. +* `"overwrite"` - the second value always replaces the first. 
+* `"replace"` - the second value replaces the first if the second is not `undefined`. + +For example: + +```js +const schema = new ObjectSchema({ + name: { + merge: "replace", + validate() {} + } +}); +``` + +### Named validation strategies + +Instead of specifying a `validate()` method, you can specify one of the following strings to use a default validation strategy: + +* `"array"` - value must be an array. +* `"boolean"` - value must be a boolean. +* `"number"` - value must be a number. +* `"object"` - value must be an object. +* `"object?"` - value must be an object or null. +* `"string"` - value must be a string. +* `"string!"` - value must be a non-empty string. + +For example: + +```js +const schema = new ObjectSchema({ + name: { + merge: "replace", + validate: "string" + } +}); +``` + +### Subschemas + +If you are defining a key that is, itself, an object, you can simplify the process by using a subschema. Instead of defining `merge()` and `validate()`, assign a `schema` key that contains a schema definition, like this: + +```js +const schema = new ObjectSchema({ + name: { + schema: { + first: { + merge: "replace", + validate: "string" + }, + last: { + merge: "replace", + validate: "string" + } + } + } +}); + +schema.validate({ + name: { + first: "n", + last: "z" + } +}); +``` + +### Remove Keys During Merge + +If the merge strategy for a key returns `undefined`, then the key will not appear in the final object. 
For example: + +```js +const schema = new ObjectSchema({ + date: { + merge() { + return undefined; + }, + validate(value) { + Date.parse(value); // throws an error when invalid + } + } +}); + +const object1 = { date: "5/5/2005" }; +const object2 = { date: "6/6/2006" }; + +const result = schema.merge(object1, object2); + +console.log("date" in result); // false +``` + +### Requiring Another Key Be Present + +If you'd like the presence of one key to require the presence of another key, you can use the `requires` property to specify an array of other properties that any key requires. For example: + +```js +const schema = new ObjectSchema(); + +const schema = new ObjectSchema({ + date: { + merge() { + return undefined; + }, + validate(value) { + Date.parse(value); // throws an error when invalid + } + }, + time: { + requires: ["date"], + merge(first, second) { + return second; + }, + validate(value) { + // ... + } + } +}); + +// throws error: Key "time" requires keys "date" +schema.validate({ + time: "13:45" +}); +``` + +In this example, even though `date` is an optional key, it is required to be present whenever `time` is present. + +## License + +BSD 3-Clause diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/package.json b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/package.json new file mode 100644 index 00000000000000..ba829090e55bd4 --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/package.json @@ -0,0 +1,33 @@ +{ + "name": "@humanwhocodes/object-schema", + "version": "1.2.0", + "description": "An object schema merger/validator", + "main": "src/index.js", + "directories": { + "test": "tests" + }, + "scripts": { + "test": "mocha tests/" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/humanwhocodes/object-schema.git" + }, + "keywords": [ + "object", + "validation", + "schema", + "merge" + ], + "author": "Nicholas C. 
Zakas", + "license": "BSD-3-Clause", + "bugs": { + "url": "https://github.com/humanwhocodes/object-schema/issues" + }, + "homepage": "https://github.com/humanwhocodes/object-schema#readme", + "devDependencies": { + "chai": "^4.2.0", + "eslint": "^5.13.0", + "mocha": "^5.2.0" + } +} \ No newline at end of file diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/index.js b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/index.js new file mode 100644 index 00000000000000..b2bc4fb96e3cb3 --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/index.js @@ -0,0 +1,7 @@ +/** + * @filedescription Object Schema Package + */ + +exports.ObjectSchema = require("./object-schema").ObjectSchema; +exports.MergeStrategy = require("./merge-strategy").MergeStrategy; +exports.ValidationStrategy = require("./validation-strategy").ValidationStrategy; diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/merge-strategy.js b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/merge-strategy.js new file mode 100644 index 00000000000000..82174492764a9d --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/merge-strategy.js @@ -0,0 +1,53 @@ +/** + * @filedescription Merge Strategy + */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Class +//----------------------------------------------------------------------------- + +/** + * Container class for several different merge strategies. + */ +class MergeStrategy { + + /** + * Merges two keys by overwriting the first with the second. + * @param {*} value1 The value from the first object key. + * @param {*} value2 The value from the second object key. + * @returns {*} The second value. 
+ */ + static overwrite(value1, value2) { + return value2; + } + + /** + * Merges two keys by replacing the first with the second only if the + * second is defined. + * @param {*} value1 The value from the first object key. + * @param {*} value2 The value from the second object key. + * @returns {*} The second value if it is defined. + */ + static replace(value1, value2) { + if (typeof value2 !== "undefined") { + return value2; + } + + return value1; + } + + /** + * Merges two properties by assigning properties from the second to the first. + * @param {*} value1 The value from the first object key. + * @param {*} value2 The value from the second object key. + * @returns {*} A new object containing properties from both value1 and + * value2. + */ + static assign(value1, value2) { + return Object.assign({}, value1, value2); + } +} + +exports.MergeStrategy = MergeStrategy; diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/object-schema.js b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/object-schema.js new file mode 100644 index 00000000000000..25864f5a280cbb --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/object-schema.js @@ -0,0 +1,239 @@ +/** + * @filedescription Object Schema + */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Requirements +//----------------------------------------------------------------------------- + +const { MergeStrategy } = require("./merge-strategy"); +const { ValidationStrategy } = require("./validation-strategy"); + +//----------------------------------------------------------------------------- +// Private +//----------------------------------------------------------------------------- + +const strategies = Symbol("strategies"); +const requiredKeys = Symbol("requiredKeys"); + +/** + * Validates a schema strategy. 
+ * @param {string} name The name of the key this strategy is for. + * @param {Object} strategy The strategy for the object key. + * @param {boolean} [strategy.required=true] Whether the key is required. + * @param {string[]} [strategy.requires] Other keys that are required when + * this key is present. + * @param {Function} strategy.merge A method to call when merging two objects + * with the same key. + * @param {Function} strategy.validate A method to call when validating an + * object with the key. + * @returns {void} + * @throws {Error} When the strategy is missing a name. + * @throws {Error} When the strategy is missing a merge() method. + * @throws {Error} When the strategy is missing a validate() method. + */ +function validateDefinition(name, strategy) { + + let hasSchema = false; + if (strategy.schema) { + if (typeof strategy.schema === "object") { + hasSchema = true; + } else { + throw new TypeError("Schema must be an object."); + } + } + + if (typeof strategy.merge === "string") { + if (!(strategy.merge in MergeStrategy)) { + throw new TypeError(`Definition for key "${name}" missing valid merge strategy.`); + } + } else if (!hasSchema && typeof strategy.merge !== "function") { + throw new TypeError(`Definition for key "${name}" must have a merge property.`); + } + + if (typeof strategy.validate === "string") { + if (!(strategy.validate in ValidationStrategy)) { + throw new TypeError(`Definition for key "${name}" missing valid validation strategy.`); + } + } else if (!hasSchema && typeof strategy.validate !== "function") { + throw new TypeError(`Definition for key "${name}" must have a validate() method.`); + } +} + + +//----------------------------------------------------------------------------- +// Class +//----------------------------------------------------------------------------- + +/** + * Represents an object validation/merging schema. + */ +class ObjectSchema { + + /** + * Creates a new instance. 
+ */ + constructor(definitions) { + + if (!definitions) { + throw new Error("Schema definitions missing."); + } + + /** + * Track all strategies in the schema by key. + * @type {Map} + * @property strategies + */ + this[strategies] = new Map(); + + /** + * Separately track any keys that are required for faster validation. + * @type {Map} + * @property requiredKeys + */ + this[requiredKeys] = new Map(); + + // add in all strategies + for (const key of Object.keys(definitions)) { + validateDefinition(key, definitions[key]); + + // normalize merge and validate methods if subschema is present + if (typeof definitions[key].schema === "object") { + const schema = new ObjectSchema(definitions[key].schema); + definitions[key] = { + ...definitions[key], + merge(first, second) { + if (first && second) { + return schema.merge(first, second); + } + + return MergeStrategy.assign(first, second); + }, + validate(value) { + ValidationStrategy.object(value); + schema.validate(value); + } + }; + } + + // normalize the merge method in case there's a string + if (typeof definitions[key].merge === "string") { + definitions[key] = { + ...definitions[key], + merge: MergeStrategy[definitions[key].merge] + }; + }; + + // normalize the validate method in case there's a string + if (typeof definitions[key].validate === "string") { + definitions[key] = { + ...definitions[key], + validate: ValidationStrategy[definitions[key].validate] + }; + }; + + this[strategies].set(key, definitions[key]); + + if (definitions[key].required) { + this[requiredKeys].set(key, definitions[key]); + } + } + } + + /** + * Determines if a strategy has been registered for the given object key. + * @param {string} key The object key to find a strategy for. + * @returns {boolean} True if the key has a strategy registered, false if not. + */ + hasKey(key) { + return this[strategies].has(key); + } + + /** + * Merges objects together to create a new object comprised of the keys + * of the all objects. 
Keys are merged based on the each key's merge + * strategy. + * @param {...Object} objects The objects to merge. + * @returns {Object} A new object with a mix of all objects' keys. + * @throws {Error} If any object is invalid. + */ + merge(...objects) { + + // double check arguments + if (objects.length < 2) { + throw new Error("merge() requires at least two arguments."); + } + + if (objects.some(object => (object == null || typeof object !== "object"))) { + throw new Error("All arguments must be objects."); + } + + return objects.reduce((result, object) => { + + this.validate(object); + + for (const [key, strategy] of this[strategies]) { + try { + if (key in result || key in object) { + const value = strategy.merge.call(this, result[key], object[key]); + if (value !== undefined) { + result[key] = value; + } + } + } catch (ex) { + ex.message = `Key "${key}": ` + ex.message; + throw ex; + } + } + return result; + }, {}); + } + + /** + * Validates an object's keys based on the validate strategy for each key. + * @param {Object} object The object to validate. + * @returns {void} + * @throws {Error} When the object is invalid. 
+ */ + validate(object) { + + // check existing keys first + for (const key of Object.keys(object)) { + + // check to see if the key is defined + if (!this.hasKey(key)) { + throw new Error(`Unexpected key "${key}" found.`); + } + + // validate existing keys + const strategy = this[strategies].get(key); + + // first check to see if any other keys are required + if (Array.isArray(strategy.requires)) { + if (!strategy.requires.every(otherKey => otherKey in object)) { + throw new Error(`Key "${key}" requires keys "${strategy.requires.join("\", \"")}".`); + } + } + + // now apply remaining validation strategy + try { + strategy.validate.call(strategy, object[key]); + } catch (ex) { + ex.message = `Key "${key}": ` + ex.message; + throw ex; + } + } + + // ensure required keys aren't missing + for (const [key] of this[requiredKeys]) { + if (!(key in object)) { + throw new Error(`Missing required key "${key}".`); + } + } + + } +} + +exports.ObjectSchema = ObjectSchema; diff --git a/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/validation-strategy.js b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/validation-strategy.js new file mode 100644 index 00000000000000..ecf918bdd17b7f --- /dev/null +++ b/tools/node_modules/eslint/node_modules/@humanwhocodes/object-schema/src/validation-strategy.js @@ -0,0 +1,102 @@ +/** + * @filedescription Validation Strategy + */ + +"use strict"; + +//----------------------------------------------------------------------------- +// Class +//----------------------------------------------------------------------------- + +/** + * Container class for several different validation strategies. + */ +class ValidationStrategy { + + /** + * Validates that a value is an array. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. 
+ */ + static array(value) { + if (!Array.isArray(value)) { + throw new TypeError("Expected an array."); + } + } + + /** + * Validates that a value is a boolean. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. + */ + static boolean(value) { + if (typeof value !== "boolean") { + throw new TypeError("Expected a Boolean."); + } + } + + /** + * Validates that a value is a number. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. + */ + static number(value) { + if (typeof value !== "number") { + throw new TypeError("Expected a number."); + } + } + + /** + * Validates that a value is a object. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. + */ + static object(value) { + if (!value || typeof value !== "object") { + throw new TypeError("Expected an object."); + } + } + + /** + * Validates that a value is a object or null. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. + */ + static "object?"(value) { + if (typeof value !== "object") { + throw new TypeError("Expected an object or null."); + } + } + + /** + * Validates that a value is a string. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. + */ + static string(value) { + if (typeof value !== "string") { + throw new TypeError("Expected a string."); + } + } + + /** + * Validates that a value is a non-empty string. + * @param {*} value The value to validate. + * @returns {void} + * @throws {TypeError} If the value is invalid. 
+ */ + static "string!"(value) { + if (typeof value !== "string" || value.length === 0) { + throw new TypeError("Expected a non-empty string."); + } + } + +} + +exports.ValidationStrategy = ValidationStrategy; diff --git a/tools/node_modules/eslint/node_modules/flatted/README.md b/tools/node_modules/eslint/node_modules/flatted/README.md index 0c1627f7d2dda8..8fd5b4d82f4459 100644 --- a/tools/node_modules/eslint/node_modules/flatted/README.md +++ b/tools/node_modules/eslint/node_modules/flatted/README.md @@ -18,10 +18,10 @@ Usable via [CDN](https://unpkg.com/flatted) or as regular module. ```js // ESM -import {parse, stringify} from 'flatted'; +import {parse, stringify, toJSON, fromJSON} from 'flatted'; // CJS -const {parse, stringify} = require('flatted'); +const {parse, stringify, toJSON, fromJSON} = require('flatted'); const a = [{}]; a[0].a = a; @@ -30,6 +30,34 @@ a.push(a); stringify(a); // [["1","0"],{"a":"0"}] ``` +## toJSON and from JSON + +If you'd like to implicitly survive JSON serialization, these two helpers helps: + +```js +import {toJSON, fromJSON} from 'flatted'; + +class RecursiveMap extends Map { + static fromJSON(any) { + return new this(fromJSON(any)); + } + toJSON() { + return toJSON([...this.entries()]); + } +} + +const recursive = new RecursiveMap; +const same = {}; +same.same = same; +recursive.set('same', same); + +const asString = JSON.stringify(recursive); +const asMap = RecursiveMap.fromJSON(JSON.parse(asString)); +asMap.get('same') === asMap.get('same').same; +// true +``` + + ## Flatted VS JSON As it is for every other specialized format capable of serializing and deserializing circular data, you should never `JSON.parse(Flatted.stringify(data))`, and you should never `Flatted.parse(JSON.stringify(data))`. 
diff --git a/tools/node_modules/eslint/node_modules/flatted/cjs/index.js b/tools/node_modules/eslint/node_modules/flatted/cjs/index.js index ea7ac895154e46..c2c94866d33659 100644 --- a/tools/node_modules/eslint/node_modules/flatted/cjs/index.js +++ b/tools/node_modules/eslint/node_modules/flatted/cjs/index.js @@ -92,3 +92,8 @@ const stringify = (value, replacer, space) => { } }; exports.stringify = stringify; + +const toJSON = any => $parse(stringify(any)); +exports.toJSON = toJSON; +const fromJSON = any => parse($stringify(any)); +exports.fromJSON = fromJSON; diff --git a/tools/node_modules/eslint/node_modules/flatted/es.js b/tools/node_modules/eslint/node_modules/flatted/es.js index aee7158d2f34f2..8e4cf0cf4aeb53 100644 --- a/tools/node_modules/eslint/node_modules/flatted/es.js +++ b/tools/node_modules/eslint/node_modules/flatted/es.js @@ -1,2 +1,2 @@ self.Flatted=function(t){"use strict"; -/*! (c) 2020 Andrea Giammarchi */const{parse:e,stringify:n}=JSON,{keys:r}=Object,s=String,c="string",l={},o="object",a=(t,e)=>e,i=t=>t instanceof s?s(t):t,f=(t,e)=>typeof e===c?new s(e):e,u=(t,e,n,c)=>{const a=[];for(let i=r(n),{length:f}=i,u=0;u{const r=s(e.push(n)-1);return t.set(n,r),r};return t.parse=(t,n)=>{const r=e(t,f).map(i),s=r[0],c=n||a,l=typeof s===o&&s?u(r,new Set,s,c):s;return c.call({"":l},"",l)},t.stringify=(t,e,r)=>{const s=e&&typeof e===o?(t,n)=>""===t||-1e,f=t=>t instanceof s?s(t):t,i=(t,e)=>typeof e===o?new s(e):e,u=(t,e,n,o)=>{const a=[];for(let f=r(n),{length:i}=f,u=0;u{const r=s(e.push(n)-1);return t.set(n,r),r},y=(t,n)=>{const r=e(t,i).map(f),s=r[0],o=n||a,c=typeof s===l&&s?u(r,new Set,s,o):s;return o.call({"":c},"",c)},g=(t,e,r)=>{const s=e&&typeof e===l?(t,n)=>""===t||-1y(n(t)),t.parse=y,t.stringify=g,t.toJSON=t=>e(g(t)),t}({}); diff --git a/tools/node_modules/eslint/node_modules/flatted/esm/index.js b/tools/node_modules/eslint/node_modules/flatted/esm/index.js index f220bbf7a31109..9a8c239c203ce9 100644 --- 
a/tools/node_modules/eslint/node_modules/flatted/esm/index.js +++ b/tools/node_modules/eslint/node_modules/flatted/esm/index.js @@ -89,3 +89,6 @@ export const stringify = (value, replacer, space) => { return after; } }; + +export const toJSON = any => $parse(stringify(any)); +export const fromJSON = any => parse($stringify(any)); diff --git a/tools/node_modules/eslint/node_modules/flatted/index.js b/tools/node_modules/eslint/node_modules/flatted/index.js index 2cf0eeb52abc8a..d170879e50ccf8 100644 --- a/tools/node_modules/eslint/node_modules/flatted/index.js +++ b/tools/node_modules/eslint/node_modules/flatted/index.js @@ -110,9 +110,17 @@ self.Flatted = (function (exports) { return after; } }; + var toJSON = function toJSON(any) { + return $parse(stringify(any)); + }; + var fromJSON = function fromJSON(any) { + return parse($stringify(any)); + }; + exports.fromJSON = fromJSON; exports.parse = parse; exports.stringify = stringify; + exports.toJSON = toJSON; return exports; diff --git a/tools/node_modules/eslint/node_modules/flatted/min.js b/tools/node_modules/eslint/node_modules/flatted/min.js index 64372fe4081620..a822de22434f24 100644 --- a/tools/node_modules/eslint/node_modules/flatted/min.js +++ b/tools/node_modules/eslint/node_modules/flatted/min.js @@ -1,2 +1,2 @@ self.Flatted=function(n){"use strict"; -/*! 
(c) 2020 Andrea Giammarchi */var t=JSON.parse,r=JSON.stringify,e=Object.keys,a=String,u="string",f={},i="object",c=function(n,t){return t},l=function(n){return n instanceof a?a(n):n},o=function(n,t){return typeof t===u?new a(t):t},s=function(n,t,r){var e=a(t.push(r)-1);return n.set(r,e),e};return n.parse=function(n,r){var u=t(n,o).map(l),s=u[0],p=r||c,v=typeof s===i&&s?function n(t,r,u,c){for(var l=[],o=e(u),s=o.length,p=0;pvalue : $value; } @@ -76,42 +76,25 @@ private static function keys(&$value) { $obj = new ReflectionObject($value); $props = $obj->getProperties(); $keys = array(); - foreach ($props as $prop) { + foreach ($props as $prop) $keys[] = $prop->getName(); - } return $keys; } private static function loop($obj, $keys, &$input, &$set, &$output) { foreach ($keys as $key) { $value = $obj ? $output->$key : $output[$key]; - if ($value instanceof FlattedString) { + if ($value instanceof FlattedString) Flatted::ref($obj, $key, $input[$value->value], $input, $set, $output); - } } return $output; } private static function relate(&$known, &$input, &$value) { - if (is_string($value)) { - $key = array_search($value, $known->key, true); - if ($key !== false) { - return $known->value[$key]; - } - return Flatted::index($known, $input, $value); - } - if (is_array($value)) { + if (is_string($value) || is_array($value) || is_object($value)) { $key = array_search($value, $known->key, true); - if ($key !== false) { + if ($key !== false) return $known->value[$key]; - } - return Flatted::index($known, $input, $value); - } - if (is_object($value)) { - $key = array_search($value, $known->key, true); - if ($key !== false) { - return $known->value[$key]; - } return Flatted::index($known, $input, $value); } return $value; @@ -137,7 +120,7 @@ private static function ref($obj, &$key, &$value, &$input, &$set, &$output) { private static function transform(&$known, &$input, &$value) { if (is_array($value)) { return array_map( - function (&$value) use(&$known, &$input) { + function 
($value) use(&$known, &$input) { return Flatted::relate($known, $input, $value); }, $value @@ -146,15 +129,14 @@ function (&$value) use(&$known, &$input) { if (is_object($value)) { $object = new stdClass; $keys = Flatted::keys($value); - foreach ($keys as $key) { + foreach ($keys as $key) $object->$key = Flatted::relate($known, $input, $value->$key); - } return $object; } return $value; } - private static function wrap(&$value) { + private static function wrap($value) { if (is_string($value)) { return new FlattedString($value); } diff --git a/tools/node_modules/eslint/package.json b/tools/node_modules/eslint/package.json index f556a5503561e2..ac3a78cc3b0f30 100644 --- a/tools/node_modules/eslint/package.json +++ b/tools/node_modules/eslint/package.json @@ -1,6 +1,6 @@ { "name": "eslint", - "version": "7.29.0", + "version": "7.30.0", "author": "Nicholas C. Zakas ", "description": "An AST-based pattern checker for JavaScript.", "bin": { @@ -45,6 +45,7 @@ "dependencies": { "@babel/code-frame": "7.12.11", "@eslint/eslintrc": "^0.4.2", + "@humanwhocodes/config-array": "^0.5.0", "ajv": "^6.10.0", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", @@ -95,7 +96,7 @@ "ejs": "^3.0.2", "eslint": "file:.", "eslint-config-eslint": "file:packages/eslint-config-eslint", - "eslint-plugin-eslint-plugin": "^3.0.3", + "eslint-plugin-eslint-plugin": "^3.2.0", "eslint-plugin-internal-rules": "file:tools/internal-rules", "eslint-plugin-jsdoc": "^25.4.3", "eslint-plugin-node": "^11.1.0", From a01dacfdcdcefe497475c148a9471842b7cf6ebb Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sun, 4 Jul 2021 18:45:38 -0700 Subject: [PATCH 071/133] doc: move Sam Ruby to emeritus Sam confirmed in email that they are not opposed to moving to emeritus at this time. 
PR-URL: https://github.com/nodejs/node/pull/39264 Reviewed-By: Richard Lau Reviewed-By: Gireesh Punathil Reviewed-By: Beth Griggs Reviewed-By: Colin Ihrig Reviewed-By: Sam Ruby --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 39a47f0f7c6c3a..1ece184a4952c0 100644 --- a/README.md +++ b/README.md @@ -411,8 +411,6 @@ For information about the governance of the Node.js project, see **Ricky Zhou** <0x19951125@gmail.com> (he/him) * [ronag](https://github.com/ronag) - **Robert Nagy** <ronagy@icloud.com> -* [rubys](https://github.com/rubys) - -**Sam Ruby** <rubys@intertwingly.net> * [ruyadorno](https://github.com/ruyadorno) - **Ruy Adorno** <ruyadorno@github.com> (he/him) * [rvagg](https://github.com/rvagg) - @@ -568,6 +566,8 @@ For information about the governance of the Node.js project, see **Ron Korving** <ron@ronkorving.nl> * [RReverser](https://github.com/RReverser) - **Ingvar Stepanyan** <me@rreverser.com> +* [rubys](https://github.com/rubys) - +**Sam Ruby** <rubys@intertwingly.net> * [sam-github](https://github.com/sam-github) - **Sam Roberts** <vieuxtech@gmail.com> * [sebdeckers](https://github.com/sebdeckers) - From 4dd6ab389aa940f98dff2d1f2bcd675b2a7b665b Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sat, 3 Jul 2021 12:54:55 -0700 Subject: [PATCH 072/133] doc: remove onboarding-extras Migrate last bit of onboarding-extras content to collaborator-guide and remove onboarding-extras. 
PR-URL: https://github.com/nodejs/node/pull/39252 Reviewed-By: Gireesh Punathil Reviewed-By: Richard Lau Reviewed-By: Beth Griggs Reviewed-By: Colin Ihrig --- .github/CODEOWNERS | 1 - doc/guides/collaborator-guide.md | 80 ++++++++++++++++++++++++++++++++ doc/guides/onboarding-extras.md | 80 -------------------------------- onboarding.md | 3 +- 4 files changed, 82 insertions(+), 82 deletions(-) delete mode 100644 doc/guides/onboarding-extras.md diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 8198c8e23f269e..77333a671a5582 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -22,7 +22,6 @@ /doc/guides/contributing/pull-requests.md @nodejs/tsc /doc/guides/collaborator-guide.md @nodejs/tsc /doc/guides/offboarding.md @nodejs/tsc -/doc/guides/onboarding-extras.md @nodejs/tsc # streams diff --git a/doc/guides/collaborator-guide.md b/doc/guides/collaborator-guide.md index dae3b8579de07f..cdc6c6da1dbad3 100644 --- a/doc/guides/collaborator-guide.md +++ b/doc/guides/collaborator-guide.md @@ -831,6 +831,86 @@ When things need extra attention, are controversial, or `semver-major`: If you cannot find who to cc for a file, `git shortlog -n -s ` can help. +## Labels + +### General labels + +* `confirmed-bug`: Bugs you have verified +* `discuss`: Things that need larger discussion +* `feature request`: Any issue that requests a new feature +* `good first issue`: Issues suitable for newcomers to fix +* `meta`: Governance, policies, procedures, etc. +* `tsc-agenda`: Open issues and pull requests with this label will be added to + the Technical Steering Committee meeting agenda + +--- + +* `author-ready` - A pull request is _author ready_ when: + * There is a CI run in progress or completed. + * There is at least one Collaborator approval (or two TSC approvals for + semver-major pull requests). + * There are no outstanding review comments. + +Please always add the `author ready` label to pull requests that qualify. 
+Please always remove it again as soon as the conditions are not met anymore, +such as if the CI run fails or a new outstanding review comment is posted. + +--- + +* `semver-{minor,major}` + * be conservative – that is, if a change has the remote *chance* of breaking + something, go for semver-major + * when adding a semver label, add a comment explaining why you're adding it + * minor vs. patch: roughly: "does it add a new method / does it add a new + section to the docs" + * major vs. everything else: run last versions tests against this version, if + they pass, **probably** minor or patch + +### LTS/version labels + +We use labels to keep track of which branches a commit should land on: + +* `dont-land-on-v?.x` + * For changes that do not apply to a certain release line + * Also used when the work of backporting a change outweighs the benefits +* `land-on-v?.x` + * Used by releasers to mark a pull request as scheduled for inclusion in an + LTS release + * Applied to the original pull request for clean cherry-picks, to the backport + pull request otherwise +* `backport-requested-v?.x` + * Used to indicate that a pull request needs a manual backport to a branch in + order to land the changes on that branch + * Typically applied by a releaser when the pull request does not apply cleanly + or it breaks the tests after applying + * Will be replaced by either `dont-land-on-v?.x` or `backported-to-v?.x` +* `backported-to-v?.x` + * Applied to pull requests for which a backport pull request has been merged +* `lts-watch-v?.x` + * Applied to pull requests which the Release working group should consider + including in an LTS release + * Does not indicate that any specific action will be taken, but can be + effective as messaging to non-collaborators +* `release-agenda` + * For things that need discussion by the Release working group + * (for example semver-minor changes that need or should go into an LTS + release) +* `v?.x` + * Automatically applied to changes that do not 
target `master` but rather the + `v?.x-staging` branch + +Once a release line enters maintenance mode, the corresponding labels do not +need to be attached anymore, as only important bugfixes will be included. + +### Other labels + +* Operating system labels + * `macos`, `windows`, `smartos`, `aix` + * No `linux` label because it is the implied default +* Architecture labels + * `arm`, `mips`, `s390`, `ppc` + * No `x86{_64}` label because it is the implied default + ["Merge Pull Request"]: https://help.github.com/articles/merging-a-pull-request/#merging-a-pull-request-on-github [Deprecation]: https://en.wikipedia.org/wiki/Deprecation [SECURITY.md]: https://github.com/nodejs/node/blob/HEAD/SECURITY.md diff --git a/doc/guides/onboarding-extras.md b/doc/guides/onboarding-extras.md deleted file mode 100644 index 79951a433926c6..00000000000000 --- a/doc/guides/onboarding-extras.md +++ /dev/null @@ -1,80 +0,0 @@ -# Additional onboarding information - -## Labels - -### General - -* `confirmed-bug`: Bugs you have verified -* `discuss`: Things that need larger discussion -* `feature request`: Any issue that requests a new feature -* `good first issue`: Issues suitable for newcomers to fix -* `meta`: Governance, policies, procedures, etc. -* `tsc-agenda`: Open issues and pull requests with this label will be added to - the Technical Steering Committee meeting agenda - ---- - -* `author-ready` - A pull request is _author ready_ when: - * There is a CI run in progress or completed. - * There is at least one Collaborator approval (or two TSC approvals for - semver-major PRs). - * There are no outstanding review comments. - -Please always add the `author ready` label to pull requests that qualify. -Please always remove it again as soon as the conditions are not met anymore, -such as if the CI run fails or a new outstanding review comment is posted. 
- ---- - -* `semver-{minor,major}` - * be conservative – that is, if a change has the remote *chance* of breaking - something, go for semver-major - * when adding a semver label, add a comment explaining why you're adding it - * minor vs. patch: roughly: "does it add a new method / does it add a new - section to the docs" - * major vs. everything else: run last versions tests against this version, if - they pass, **probably** minor or patch - -### LTS/version labels - -We use labels to keep track of which branches a commit should land on: - -* `dont-land-on-v?.x` - * For changes that do not apply to a certain release line - * Also used when the work of backporting a change outweighs the benefits -* `land-on-v?.x` - * Used by releasers to mark a PR as scheduled for inclusion in an LTS release - * Applied to the original PR for clean cherry-picks, to the backport PR - otherwise -* `backport-requested-v?.x` - * Used to indicate that a PR needs a manual backport to a branch in order to - land the changes on that branch - * Typically applied by a releaser when the PR does not apply cleanly or it - breaks the tests after applying - * Will be replaced by either `dont-land-on-v?.x` or `backported-to-v?.x` -* `backported-to-v?.x` - * Applied to PRs for which a backport PR has been merged -* `lts-watch-v?.x` - * Applied to PRs which the LTS working group should consider including in a - LTS release - * Does not indicate that any specific action will be taken, but can be - effective as messaging to non-collaborators -* `lts-agenda` - * For things that need discussion by the LTS working group - * (for example semver-minor changes that need or should go into an LTS - release) -* `v?.x` - * Automatically applied to changes that do not target `master` but rather the - `v?.x-staging` branch - -Once a release line enters maintenance mode, the corresponding labels do not -need to be attached anymore, as only important bugfixes will be included. 
- -### Other labels - -* Operating system labels - * `macos`, `windows`, `smartos`, `aix` - * No `linux` label because it is the implied default -* Architecture labels - * `arm`, `mips`, `s390`, `ppc` - * No `x86{_64}` label because it is the implied default diff --git a/onboarding.md b/onboarding.md index d1eca3a4c40f79..52104b9fb26128 100644 --- a/onboarding.md +++ b/onboarding.md @@ -81,7 +81,7 @@ The project has two venues for real-time discussion: * Be nice about closing issues! Let people know why, and that issues and PRs can be reopened if necessary -* [**See "Labels"**](./doc/guides/onboarding-extras.md#labels) +* See [Labels][]. * There is [a bot](https://github.com/nodejs-github-bot/github-bot) that applies subsystem labels (for example, `doc`, `test`, `assert`, or `buffer`) so that we know what parts of the code base the pull request modifies. It is @@ -239,6 +239,7 @@ needs to be pointed out separately during the onboarding. the [summit](https://github.com/nodejs/summit) repository for details. [Code of Conduct]: https://github.com/nodejs/admin/blob/HEAD/CODE_OF_CONDUCT.md +[Labels]: doc/guides/collaborator-guide.md#labels [Landing Pull Requests]: doc/guides/collaborator-guide.md#landing-pull-requests [Publicizing or hiding organization membership]: https://help.github.com/articles/publicizing-or-hiding-organization-membership/ [`author-ready`]: doc/guides/collaborator-guide.md#author-ready-pull-requests From 04bcfcfff10fffa7c37bb7a1411a60603b340d3d Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Tue, 29 Jun 2021 16:42:47 -0400 Subject: [PATCH 073/133] doc: update Node-api version matrix MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - add 14.x version in which Node-api version 8 was added. 
Signed-off-by: Michael Dawson PR-URL: https://github.com/nodejs/node/pull/39197 Reviewed-By: James M Snell Reviewed-By: Gerhard Stöbich Reviewed-By: Luigi Pinca Reviewed-By: Chengzhong Wu --- doc/api/n-api.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/api/n-api.md b/doc/api/n-api.md index ec15f7cf7a41b2..d06cbd1ba70481 100644 --- a/doc/api/n-api.md +++ b/doc/api/n-api.md @@ -326,7 +326,7 @@ listed as supporting a later version. v14.0.0 v14.0.0 v14.12.0 - + v14.17.0 v15.x From 3ed04994b70fa7973d7c471105e8523e6794be80 Mon Sep 17 00:00:00 2001 From: Richard Lau Date: Tue, 29 Jun 2021 10:04:18 +0100 Subject: [PATCH 074/133] build: shorten path used in tarball build workflow Shorten the path to the workspace for the GitHub Actions `build-tarball` workflow to avoid `execvp: printf: Argument list too long` errors from `make`. GitHub currently runs workflows in a `/home/runner/work/my-repo/my-repo` directory where `my-repo` is the repository name and is repeated twice (the second is from the git checkout). Some of the command lines in the Node.js build, e.g. the `ar` command to create static libraries, pass several fully qualified paths to filenames so the workflow directory is repeated many times. The most recent V8 update added more files to the command and has now tipped the command line length over the maximum allowed when using forks of the `node` repository with a longer name (e.g. `node-auto-test` and the private fork used to prepare security releases). Use GitHub's `RUNNER_TEMP` environment variable to extract the source tarball into the temporary directory on the GitHub runner. This is currently `/home/runner/work/_temp` and is not dependent on the name of the repository.
PR-URL: https://github.com/nodejs/node/pull/39192 Reviewed-By: Jiawen Geng Reviewed-By: Beth Griggs Reviewed-By: Michael Dawson Reviewed-By: James M Snell --- .github/workflows/build-tarball.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build-tarball.yml b/.github/workflows/build-tarball.yml index 1f802ca12b56c6..7022b6c1142678 100644 --- a/.github/workflows/build-tarball.yml +++ b/.github/workflows/build-tarball.yml @@ -58,8 +58,8 @@ jobs: name: tarballs - name: Extract tarball run: | - tar xzf tarballs/*.tar.gz - echo "TAR_DIR=`basename tarballs/*.tar.gz .tar.gz`" >> $GITHUB_ENV + tar xzf tarballs/*.tar.gz -C $RUNNER_TEMP + echo "TAR_DIR=$RUNNER_TEMP/`basename tarballs/*.tar.gz .tar.gz`" >> $GITHUB_ENV - name: Copy directories needed for testing run: | cp -r tools/node_modules $TAR_DIR/tools From 0673ede3ad60bdb28d154059d9a09777bb485187 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Thu, 1 Jul 2021 17:47:07 +0200 Subject: [PATCH 075/133] tools: take ownership of deps/v8/tools/node The files are not maintained nor used upstream anymore. 
PR-URL: https://github.com/nodejs/node/pull/39222 Reviewed-By: Richard Lau Reviewed-By: Jiawen Geng Reviewed-By: Colin Ihrig --- .gitignore | 2 +- tools/make-v8.sh | 2 +- tools/v8/fetch_deps.py | 101 ++++++++++++++++++++++++++++++++++++++++ tools/v8/node_common.py | 54 +++++++++++++++++++++ 4 files changed, 157 insertions(+), 2 deletions(-) create mode 100755 tools/v8/fetch_deps.py create mode 100755 tools/v8/node_common.py diff --git a/.gitignore b/.gitignore index b46679450bdbe6..08b44ff9561db3 100644 --- a/.gitignore +++ b/.gitignore @@ -133,7 +133,7 @@ tools/*/*.i.tmp /deps/uv/.github/ /deps/uv/docs/code/ /deps/uv/docs/src/guide/ -# Ignore dependencies fetched by deps/v8/tools/node/fetch_deps.py +# Ignore dependencies fetched by tools/v8/fetch_deps.py /deps/.cipd # === Rules for Windows vcbuild.bat === diff --git a/tools/make-v8.sh b/tools/make-v8.sh index 79ab02af275aa9..c63cb9ccf77301 100755 --- a/tools/make-v8.sh +++ b/tools/make-v8.sh @@ -7,7 +7,7 @@ V8_BUILD_OPTIONS=$2 cd deps/v8 || exit find . -type d -name .git -print0 | xargs -0 rm -rf -tools/node/fetch_deps.py . +../../tools/v8/fetch_deps.py . ARCH="`arch`" if [ "$ARCH" = "s390x" ] || [ "$ARCH" = "ppc64le" ]; then diff --git a/tools/v8/fetch_deps.py b/tools/v8/fetch_deps.py new file mode 100755 index 00000000000000..ee5b629e2b0e59 --- /dev/null +++ b/tools/v8/fetch_deps.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python +# Copyright 2017 the V8 project authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +""" +Use this script to fetch all dependencies for V8 to run build_gn.py. 
+ +Usage: fetch_deps.py +""" + +# for py2/py3 compatibility +from __future__ import print_function + +import os +import subprocess +import sys + +import node_common + +GCLIENT_SOLUTION = [ + { "name" : "v8", + "url" : "https://chromium.googlesource.com/v8/v8.git", + "deps_file" : "DEPS", + "managed" : False, + "custom_deps" : { + # These deps are already part of Node.js. + "v8/base/trace_event/common" : None, + "v8/third_party/googletest/src" : None, + # These deps are unnecessary for building. + "v8/test/benchmarks/data" : None, + "v8/testing/gmock" : None, + "v8/test/mozilla/data" : None, + "v8/test/test262/data" : None, + "v8/test/test262/harness" : None, + "v8/third_party/android_ndk" : None, + "v8/third_party/android_sdk" : None, + "v8/third_party/catapult" : None, + "v8/third_party/colorama/src" : None, + "v8/third_party/fuchsia-sdk" : None, + "v8/third_party/instrumented_libraries" : None, + "v8/tools/luci-go" : None, + "v8/tools/swarming_client" : None, + "v8/third_party/qemu-linux-x64" : None, + }, + }, +] + +def EnsureGit(v8_path): + def git(args): + # shell=True needed on Windows to resolve git.bat. + return subprocess.check_output( + "git " + args, cwd=v8_path, shell=True).strip() + + expected_git_dir = os.path.join(v8_path, ".git") + actual_git_dir = git("rev-parse --absolute-git-dir") + if expected_git_dir == actual_git_dir: + print("V8 is tracked stand-alone by git.") + return False + print("Initializing temporary git repository in v8.") + git("init") + git("config user.name \"Ada Lovelace\"") + git("config user.email ada@lovela.ce") + git("commit --allow-empty -m init") + return True + +def FetchDeps(v8_path): + # Verify path. + v8_path = os.path.abspath(v8_path) + assert os.path.isdir(v8_path) + + # Check out depot_tools if necessary. + depot_tools = node_common.EnsureDepotTools(v8_path, True) + + temporary_git = EnsureGit(v8_path) + try: + print("Fetching dependencies.") + env = os.environ.copy() + # gclient needs to have depot_tools in the PATH. 
+ env["PATH"] = depot_tools + os.pathsep + env["PATH"] + gclient = os.path.join(depot_tools, "gclient.py") + spec = "solutions = %s" % GCLIENT_SOLUTION + subprocess.check_call([sys.executable, gclient, "sync", "--spec", spec], + cwd=os.path.join(v8_path, os.path.pardir), + env=env) + except: + raise + finally: + if temporary_git: + node_common.UninitGit(v8_path) + # Clean up .gclient_entries file. + gclient_entries = os.path.normpath( + os.path.join(v8_path, os.pardir, ".gclient_entries")) + if os.path.isfile(gclient_entries): + os.remove(gclient_entries) + + return depot_tools + + +if __name__ == "__main__": + FetchDeps(sys.argv[1]) diff --git a/tools/v8/node_common.py b/tools/v8/node_common.py new file mode 100755 index 00000000000000..2efb21860e3cb9 --- /dev/null +++ b/tools/v8/node_common.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python +# Copyright 2017 the V8 project authors. All rights reserved. +# Use of this source code is governed by a BSD-style license that can be +# found in the LICENSE file. + +# for py2/py3 compatibility +from __future__ import print_function + +import os +import pipes +import shutil +import stat +import subprocess +import sys + +DEPOT_TOOLS_URL = \ + "https://chromium.googlesource.com/chromium/tools/depot_tools.git" + +def EnsureDepotTools(v8_path, fetch_if_not_exist): + def _Get(v8_path): + depot_tools = os.path.join(v8_path, "_depot_tools") + try: + gclient_path = os.path.join(depot_tools, "gclient.py") + if os.path.isfile(gclient_path): + return depot_tools + except: + pass + if fetch_if_not_exist: + print("Checking out depot_tools.") + # shell=True needed on Windows to resolve git.bat. + subprocess.check_call("git clone {} {}".format( + pipes.quote(DEPOT_TOOLS_URL), + pipes.quote(depot_tools)), shell=True) + # Using check_output to hide warning messages. 
+ subprocess.check_output( + [sys.executable, gclient_path, "metrics", "--opt-out"], + cwd=depot_tools) + return depot_tools + return None + depot_tools = _Get(v8_path) + assert depot_tools is not None + print("Using depot tools in %s" % depot_tools) + return depot_tools + +def UninitGit(v8_path): + print("Uninitializing temporary git repository") + target = os.path.join(v8_path, ".git") + if os.path.isdir(target): + print(">> Cleaning up %s" % target) + def OnRmError(func, path, exec_info): + # This might happen on Windows + os.chmod(path, stat.S_IWRITE) + os.unlink(path) + shutil.rmtree(target, onerror=OnRmError) From 13755599e16fb54838c10bd743e05c86d7d6f4ff Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Fri, 2 Jul 2021 06:44:15 -0700 Subject: [PATCH 076/133] test: remove workaround code in debugger test Remove code that made a check more lenient to account for a known issue that is no longer reproducible. Refs: https://github.com/nodejs/node-inspect/issues/35 PR-URL: https://github.com/nodejs/node/pull/39238 Reviewed-By: Colin Ihrig Reviewed-By: Jan Krems --- test/sequential/test-debugger-preserve-breaks.js | 16 +++------------- 1 file changed, 3 insertions(+), 13 deletions(-) diff --git a/test/sequential/test-debugger-preserve-breaks.js b/test/sequential/test-debugger-preserve-breaks.js index a52adf606a1cff..fbc463af96a1e6 100644 --- a/test/sequential/test-debugger-preserve-breaks.js +++ b/test/sequential/test-debugger-preserve-breaks.js @@ -53,19 +53,9 @@ const path = require('path'); }) .then(() => cli.command('breakpoints')) .then(() => { - // TODO: There is a known issue on AIX and some other operating systems - // where the breakpoints aren't properly resolved yet when we reach this - // point. Eventually that should be figured out but for now we don't - // want to fail builds because of it. 
- // What it should be: - // - // const msg = `SCRIPT: ${script}, OUTPUT: ${cli.output}`; - // assert.ok(cli.output.includes(`#0 ${script}:2`), msg); - // assert.ok(cli.output.includes(`#1 ${script}:3`), msg); - // - // What we're doing for now instead: - assert.match(cli.output, /#0 [^\n]+three-lines\.js\$?:2/); - assert.match(cli.output, /#1 [^\n]+three-lines\.js\$?:3/); + const msg = `SCRIPT: ${script}, OUTPUT: ${cli.output}`; + assert.ok(cli.output.includes(`#0 ${script}:2`), msg); + assert.ok(cli.output.includes(`#1 ${script}:3`), msg); }) .then(() => cli.quit()) .then(null, onFatal); From eacee0ab176caa629fb07ccc3fdea83345f9712b Mon Sep 17 00:00:00 2001 From: Michael Dawson Date: Tue, 29 Jun 2021 17:13:59 -0400 Subject: [PATCH 077/133] doc: use "repository" in guides versus repo Fix remaining instances in the guides where we use repo instead of repository. Signed-off-by: Michael Dawson PR-URL: https://github.com/nodejs/node/pull/39198 Reviewed-By: James M Snell Reviewed-By: Harshitha K P Reviewed-By: Rich Trott Reviewed-By: Gireesh Punathil --- doc/guides/releases.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/guides/releases.md b/doc/guides/releases.md index 832b8c8eda0680..5087f4e0bb69be 100644 --- a/doc/guides/releases.md +++ b/doc/guides/releases.md @@ -583,9 +583,9 @@ $ git push upstream master ### 14. Push the release tag -Push the tag to the repo before you promote the builds. If you haven't pushed -your tag first, then build promotion won't work properly. Push the tag using the -following command: +Push the tag to the repository before you promote the builds. If you +haven't pushed your tag first, then build promotion won't work properly. 
+Push the tag using the following command: ```console $ git push From 146f733f43887d9b314bd5bcf92d9b4d6ccff6b5 Mon Sep 17 00:00:00 2001 From: Cyrille Bourgois Date: Tue, 6 Jul 2021 19:29:16 +0200 Subject: [PATCH 078/133] doc: fix constants usage in fs.access example PR-URL: https://github.com/nodejs/node/pull/39289 Reviewed-By: Luigi Pinca Reviewed-By: Antoine du Hamel --- doc/api/fs.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/api/fs.md b/doc/api/fs.md index 9ef306cd1be2f8..997e0ce2b490b8 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -1393,7 +1393,7 @@ access(file, constants.W_OK, (err) => { }); // Check if the file exists in the current directory, and if it is writable. -access(file, constants.F_OK | fs.constants.W_OK, (err) => { +access(file, constants.F_OK | constants.W_OK, (err) => { if (err) { console.error( `${file} ${err.code === 'ENOENT' ? 'does not exist' : 'is read-only'}`); From 4d53c63c223b29930af5377a62f5e219b5964b7e Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Mon, 5 Jul 2021 10:00:10 -0700 Subject: [PATCH 079/133] doc: fix boldface punctuation for full sentences MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit If an entire sentence is in boldface, then generally the terminating punctuation should as well. PR-URL: https://github.com/nodejs/node/pull/39278 Reviewed-By: Michaël Zasso Reviewed-By: Colin Ihrig Reviewed-By: Tobias Nießen --- doc/api/domain.md | 2 +- doc/api/fs.md | 6 +++--- doc/api/punycode.md | 2 +- doc/api/vm.md | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/doc/api/domain.md b/doc/api/domain.md index 000878ea70d9fa..fd75be9cd49779 100644 --- a/doc/api/domain.md +++ b/doc/api/domain.md @@ -20,7 +20,7 @@ changes: -**This module is pending deprecation**. Once a replacement API has been +**This module is pending deprecation.** Once a replacement API has been finalized, this module will be fully deprecated. 
Most developers should **not** have cause to use this module. Users who absolutely must have the functionality that domains provide may rely on it for the time being diff --git a/doc/api/fs.md b/doc/api/fs.md index 997e0ce2b490b8..fa7dd3c70cb947 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -1087,7 +1087,7 @@ changes: option is not `true`. **Default:** `0`. * `recursive` {boolean} If `true`, perform a recursive directory removal. In recursive mode, operations are retried on failure. **Default:** `false`. - **Deprecated**. + **Deprecated.** * `retryDelay` {integer} The amount of time in milliseconds to wait between retries. This option is ignored if the `recursive` option is not `true`. **Default:** `100`. @@ -3216,7 +3216,7 @@ changes: option is not `true`. **Default:** `0`. * `recursive` {boolean} If `true`, perform a recursive directory removal. In recursive mode, operations are retried on failure. **Default:** `false`. - **Deprecated**. + **Deprecated.** * `retryDelay` {integer} The amount of time in milliseconds to wait between retries. This option is ignored if the `recursive` option is not `true`. **Default:** `100`. @@ -4854,7 +4854,7 @@ changes: option is not `true`. **Default:** `0`. * `recursive` {boolean} If `true`, perform a recursive directory removal. In recursive mode, operations are retried on failure. **Default:** `false`. - **Deprecated**. + **Deprecated.** * `retryDelay` {integer} The amount of time in milliseconds to wait between retries. This option is ignored if the `recursive` option is not `true`. **Default:** `100`. diff --git a/doc/api/punycode.md b/doc/api/punycode.md index c9c20ce6350c7e..7f4e5fcf003e75 100644 --- a/doc/api/punycode.md +++ b/doc/api/punycode.md @@ -9,7 +9,7 @@ deprecated: v7.0.0 -**The version of the punycode module bundled in Node.js is being deprecated**. +**The version of the punycode module bundled in Node.js is being deprecated.** In a future major version of Node.js this module will be removed. 
Users currently depending on the `punycode` module should switch to using the userland-provided [Punycode.js][] module instead. For punycode-based URL diff --git a/doc/api/vm.md b/doc/api/vm.md index df9dc824f6a094..49297ada3c65e9 100644 --- a/doc/api/vm.md +++ b/doc/api/vm.md @@ -10,7 +10,7 @@ The `vm` module enables compiling and running code within V8 Virtual Machine contexts. **The `vm` module is not a security mechanism. Do -not use it to run untrusted code**. +not use it to run untrusted code.** JavaScript code can be compiled and run immediately or compiled, saved, and run later. From 175a6569f4f6f139d753f97de56d998dd2d46cf9 Mon Sep 17 00:00:00 2001 From: Jacob <3012099+JakobJingleheimer@users.noreply.github.com> Date: Sun, 27 Jun 2021 10:24:04 +0200 Subject: [PATCH 080/133] doc: add annotation to writeFile `data` as `Object` Fixes: https://github.com/nodejs/node/issues/39152 PR-URL: https://github.com/nodejs/node/pull/39167 Reviewed-By: Antoine du Hamel --- doc/api/fs.md | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/doc/api/fs.md b/doc/api/fs.md index fa7dd3c70cb947..222686c555070e 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -440,6 +440,9 @@ changes: Write `buffer` to the file. +If `buffer` is a plain object, it must have an own (not inherited) `toString` +function property. + The promise is resolved with an object containing two properties: * `bytesWritten` {integer} the number of bytes written @@ -1284,8 +1287,8 @@ changes: * Returns: {Promise} Fulfills with `undefined` upon success. Asynchronously writes data to a file, replacing the file if it already exists. -`data` can be a string, a {Buffer}, or an object with an own `toString` function -property. +`data` can be a string, a {Buffer}, or, an object with an own (not inherited) +`toString` function property. The `encoding` option is ignored if `data` is a buffer. 
@@ -3937,7 +3940,9 @@ When `file` is a file descriptor, the behavior is similar to calling a file descriptor. The `encoding` option is ignored if `data` is a buffer. -If `data` is a normal object, it must have an own `toString` function property. + +If `data` is a plain object, it must have an own (not inherited) `toString` +function property. ```mjs import { writeFile } from 'fs'; @@ -5031,6 +5036,9 @@ changes: Returns `undefined`. +If `data` is a plain object, it must have an own (not inherited) `toString` +function property. + For detailed information, see the documentation of the asynchronous version of this API: [`fs.writeFile()`][]. @@ -5065,6 +5073,9 @@ changes: * `position` {integer} * Returns: {number} The number of bytes written. +If `buffer` is a plain object, it must have an own (not inherited) `toString` +function property. + For detailed information, see the documentation of the asynchronous version of this API: [`fs.write(fd, buffer...)`][]. @@ -5091,6 +5102,9 @@ changes: * `encoding` {string} * Returns: {number} The number of bytes written. +If `string` is a plain object, it must have an own (not inherited) `toString` +function property. + For detailed information, see the documentation of the asynchronous version of this API: [`fs.write(fd, string...)`][]. From 9184259a547d7ab51192a4a5a633efcddd3ce56a Mon Sep 17 00:00:00 2001 From: Voltrex Date: Sun, 6 Jun 2021 01:46:27 +0430 Subject: [PATCH 081/133] src: add JSDoc typings for v8 Added JSDoc typings for the `v8` lib module. 
PR-URL: https://github.com/nodejs/node/pull/38944 Reviewed-By: James M Snell Reviewed-By: Michael Dawson Reviewed-By: Zijian Liu --- lib/v8.js | 78 +++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 78 insertions(+) diff --git a/lib/v8.js b/lib/v8.js index e7a44331b8b350..b4e2d5cd1a751a 100644 --- a/lib/v8.js +++ b/lib/v8.js @@ -58,6 +58,12 @@ const { } = internalBinding('heap_utils'); const { HeapSnapshotStream } = require('internal/heap_utils'); +/** + * Generates a snapshot of the current V8 heap + * and writes it to a JSON file. + * @param {string} [filename] + * @returns {string} + */ function writeHeapSnapshot(filename) { if (filename !== undefined) { filename = getValidatedPath(filename); @@ -66,6 +72,11 @@ function writeHeapSnapshot(filename) { return triggerHeapSnapshot(filename); } +/** + * Generates a snapshot of the current V8 heap + * and returns a Readable Stream. + * @returns {import('./stream.js').Readable} + */ function getHeapSnapshot() { const handle = createHeapSnapshotStream(); assert(handle); @@ -111,11 +122,32 @@ const { const kNumberOfHeapSpaces = kHeapSpaces.length; +/** + * Sets V8 command-line flags. + * @param {string} flags + * @returns {void} + */ function setFlagsFromString(flags) { validateString(flags, 'flags'); _setFlagsFromString(flags); } +/** + * Gets the current V8 heap statistics. + * @returns {{ + * total_heap_size: number; + * total_heap_size_executable: number; + * total_physical_size: number; + * total_available_size: number; + * used_heap_size: number; + * heap_size_limit: number; + * malloced_memory: number; + * peak_malloced_memory: number; + * does_zap_garbage: number; + * number_of_native_contexts: number; + * number_of_detached_contexts: number; + * }} + */ function getHeapStatistics() { const buffer = binding.heapStatisticsBuffer; @@ -136,6 +168,16 @@ function getHeapStatistics() { }; } +/** + * Gets the current V8 heap space statistics. 
+ * @returns {{ + * space_name: string; + * space_size: number; + * space_used_size: number; + * space_available_size: number; + * physical_space_size: number; + * }[]} + */ function getHeapSpaceStatistics() { const heapSpaceStatistics = new Array(kNumberOfHeapSpaces); const buffer = binding.heapSpaceStatisticsBuffer; @@ -154,6 +196,14 @@ function getHeapSpaceStatistics() { return heapSpaceStatistics; } +/** + * Gets the current V8 heap code statistics. + * @returns {{ + * code_and_metadata_size: number; + * bytecode_and_metadata_size: number; + * external_script_source_size: number; + * }} + */ function getHeapCodeStatistics() { const buffer = binding.heapCodeStatisticsBuffer; @@ -170,6 +220,11 @@ function getHeapCodeStatistics() { /* JS methods for the base objects */ Serializer.prototype._getDataCloneError = Error; +/** + * Reads raw bytes from the deserializer's internal buffer. + * @param {number} length + * @returns {Buffer} + */ Deserializer.prototype.readRawBytes = function readRawBytes(length) { const offset = this._readRawBytes(length); // `this.buffer` can be a Buffer or a plain Uint8Array, so just calling @@ -210,6 +265,12 @@ class DefaultSerializer extends Serializer { this._setTreatArrayBufferViewsAsHostObjects(true); } + /** + * Used to write some kind of host object, i.e. an + * object that is created by native C++ bindings. + * @param {Object} abView + * @returns {void} + */ _writeHostObject(abView) { let i = 0; if (abView.constructor === Buffer) { @@ -232,6 +293,11 @@ class DefaultSerializer extends Serializer { } class DefaultDeserializer extends Deserializer { + /** + * Used to read some kind of host object, i.e. an + * object that is created by native C++ bindings. + * @returns {any} + */ _readHostObject() { const typeIndex = this.readUint32(); const ctor = arrayBufferViewTypes[typeIndex]; @@ -254,6 +320,12 @@ class DefaultDeserializer extends Deserializer { } } +/** + * Uses a `DefaultSerializer` to serialize `value` + * into a buffer. 
+ * @param {any} value + * @returns {Buffer} + */ function serialize(value) { const ser = new DefaultSerializer(); ser.writeHeader(); @@ -261,6 +333,12 @@ function serialize(value) { return ser.releaseBuffer(); } +/** + * Uses a `DefaultDeserializer` with default options + * to read a JavaScript value from a buffer. + * @param {Buffer | TypedArray | DataView} buffer + * @returns {any} + */ function deserialize(buffer) { const der = new DefaultDeserializer(buffer); der.readHeader(); From f06ebf17758729819c48d62340cfa566985e6c37 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Mon, 5 Jul 2021 13:58:54 -0700 Subject: [PATCH 082/133] doc: remove emailing the TSC from offboarding doc MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Emailing the TSC seems superfluous. Removing it. PR-URL: https://github.com/nodejs/node/pull/39280 Reviewed-By: Colin Ihrig Reviewed-By: Antoine du Hamel Reviewed-By: Robert Nagy Reviewed-By: Michael Dawson Reviewed-By: Gireesh Punathil Reviewed-By: Matteo Collina Reviewed-By: Beth Griggs Reviewed-By: Tobias Nießen --- doc/guides/offboarding.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/doc/guides/offboarding.md b/doc/guides/offboarding.md index 13f602bb0f8286..0d73e412089b7f 100644 --- a/doc/guides/offboarding.md +++ b/doc/guides/offboarding.md @@ -6,8 +6,6 @@ Emeritus or leaves the project. * Remove the Collaborator from the @nodejs/collaborators team. * Open a fast-track pull request to move the Collaborator to Collaborator Emeriti in README.md. -* Email the TSC mailing list to notify TSC members that the Collaborator is - moving to Collaborator Emeritus. * Determine what GitHub teams the Collaborator belongs to. In consultation with the Collaborator, determine which of those teams they should be removed from. 
* Some teams may also require a pull request to remove the Collaborator from From a7cd40ed8d8ed0eeacd21263a69efd2694594d94 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?J=C3=A9r=C3=A9my=20Lal?= Date: Mon, 5 Jul 2021 00:22:46 +0200 Subject: [PATCH 083/133] build: uvwasi honours node_shared_libuv MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fix #39248. PR-URL: https://github.com/nodejs/node/pull/39260 Reviewed-By: Richard Lau Reviewed-By: Tobias Nießen Reviewed-By: Colin Ihrig Reviewed-By: James M Snell --- deps/uvwasi/uvwasi.gyp | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/deps/uvwasi/uvwasi.gyp b/deps/uvwasi/uvwasi.gyp index d4189eeee2fc94..5822f8d59bd9e9 100644 --- a/deps/uvwasi/uvwasi.gyp +++ b/deps/uvwasi/uvwasi.gyp @@ -18,9 +18,6 @@ 'src/wasi_rights.c', 'src/wasi_serdes.c', ], - 'dependencies': [ - '../uv/uv.gyp:libuv', - ], 'direct_dependent_settings': { 'include_dirs': ['include'] }, @@ -31,6 +28,11 @@ '_POSIX_C_SOURCE=200112', ], }], + [ 'node_shared_libuv=="false"', { + 'dependencies': [ + '../uv/uv.gyp:libuv', + ], + }], ], } ] From 33cad271c583536befefaa9d320228c59c122ad3 Mon Sep 17 00:00:00 2001 From: Gus Caplan Date: Mon, 28 Jun 2021 11:08:33 -0500 Subject: [PATCH 084/133] errors: remove eager stack generation for node errors PR-URL: https://github.com/nodejs/node/pull/39182 Reviewed-By: Guy Bedford Reviewed-By: James M Snell --- lib/internal/errors.js | 142 +++++++++--------- lib/internal/http2/util.js | 14 +- .../source_map/prepare_stack_trace.js | 10 +- test/message/esm_loader_not_found.out | 6 +- test/parallel/test-repl-top-level-await.js | 12 +- 5 files changed, 104 insertions(+), 80 deletions(-) diff --git a/lib/internal/errors.js b/lib/internal/errors.js index ed3fa3787e5eec..fb01b8f6731ff1 100644 --- a/lib/internal/errors.js +++ b/lib/internal/errors.js @@ -33,6 +33,7 @@ const { Number, NumberIsInteger, ObjectDefineProperty, + ObjectDefineProperties, ObjectIsExtensible, 
ObjectGetOwnPropertyDescriptor, ObjectKeys, @@ -58,6 +59,8 @@ const { URIError, } = primordials; +const kIsNodeError = Symbol('kIsNodeError'); + const isWindows = process.platform === 'win32'; const messages = new SafeMap(); @@ -116,7 +119,12 @@ const prepareStackTrace = (globalThis, error, trace) => { // Error: Message // at function (file) // at file - const errorString = ErrorPrototypeToString(error); + let errorString; + if (kIsNodeError in error) { + errorString = `${error.name} [${error.code}]: ${error.message}`; + } else { + errorString = ErrorPrototypeToString(error); + } if (trace.length === 0) { return errorString; } @@ -186,27 +194,6 @@ function lazyBuffer() { return buffer; } -const addCodeToName = hideStackFrames(function addCodeToName(err, name, code) { - // Set the stack - err = captureLargerStackTrace(err); - // Add the error code to the name to include it in the stack trace. - err.name = `${name} [${code}]`; - // Access the stack to generate the error message including the error code - // from the name. - err.stack; // eslint-disable-line no-unused-expressions - // Reset the name to the actual name. 
- if (name === 'SystemError') { - ObjectDefineProperty(err, 'name', { - value: name, - enumerable: false, - writable: true, - configurable: true - }); - } else { - delete err.name; - } -}); - function isErrorStackTraceLimitWritable() { const desc = ObjectGetOwnPropertyDescriptor(Error, 'stackTraceLimit'); if (desc === undefined) { @@ -242,43 +229,55 @@ class SystemError extends Error { if (context.dest !== undefined) message += ` => ${context.dest}`; - ObjectDefineProperty(this, 'message', { - value: message, - enumerable: false, - writable: true, - configurable: true - }); - addCodeToName(this, 'SystemError', key); + captureLargerStackTrace(this); this.code = key; - ObjectDefineProperty(this, 'info', { - value: context, - enumerable: true, - configurable: true, - writable: false - }); - - ObjectDefineProperty(this, 'errno', { - get() { - return context.errno; + ObjectDefineProperties(this, { + [kIsNodeError]: { + value: true, + enumerable: false, + writable: false, + configurable: true, }, - set: (value) => { - context.errno = value; + name: { + value: 'SystemError', + enumerable: false, + writable: true, + configurable: true, }, - enumerable: true, - configurable: true - }); - - ObjectDefineProperty(this, 'syscall', { - get() { - return context.syscall; + message: { + value: message, + enumerable: false, + writable: true, + configurable: true, + }, + info: { + value: context, + enumerable: true, + configurable: true, + writable: false, }, - set: (value) => { - context.syscall = value; + errno: { + get() { + return context.errno; + }, + set: (value) => { + context.errno = value; + }, + enumerable: true, + configurable: true, + }, + syscall: { + get() { + return context.syscall; + }, + set: (value) => { + context.syscall = value; + }, + enumerable: true, + configurable: true, }, - enumerable: true, - configurable: true }); if (context.path !== undefined) { @@ -346,21 +345,29 @@ function makeNodeErrorWithCode(Base, key) { // Reset the limit and setting the name 
property. if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = limit; const message = getMessage(key, args, error); - ObjectDefineProperty(error, 'message', { - value: message, - enumerable: false, - writable: true, - configurable: true, - }); - ObjectDefineProperty(error, 'toString', { - value() { - return `${this.name} [${key}]: ${this.message}`; + ObjectDefineProperties(error, { + [kIsNodeError]: { + value: true, + enumerable: false, + writable: false, + configurable: true, + }, + message: { + value: message, + enumerable: false, + writable: true, + configurable: true, + }, + toString: { + value() { + return `${this.name} [${key}]: ${this.message}`; + }, + enumerable: false, + writable: true, + configurable: true, }, - enumerable: false, - writable: true, - configurable: true, }); - addCodeToName(error, Base.name, key); + captureLargerStackTrace(error); error.code = key; return error; }; @@ -792,7 +799,6 @@ class AbortError extends Error { } } module.exports = { - addCodeToName, // Exported for NghttpError aggregateTwoErrors, codes, dnsException, @@ -815,7 +821,9 @@ module.exports = { maybeOverridePrepareStackTrace, overrideStackTrace, kEnhanceStackBeforeInspector, - fatalExceptionStackEnhancers + fatalExceptionStackEnhancers, + kIsNodeError, + captureLargerStackTrace, }; // To declare an error message, use the E(sym, val, def) function above. 
The sym diff --git a/lib/internal/http2/util.js b/lib/internal/http2/util.js index f8252fffba65f5..78ff2937c3a317 100644 --- a/lib/internal/http2/util.js +++ b/lib/internal/http2/util.js @@ -9,6 +9,7 @@ const { MathMax, Number, ObjectCreate, + ObjectDefineProperty, ObjectKeys, SafeSet, String, @@ -28,9 +29,10 @@ const { ERR_INVALID_ARG_TYPE, ERR_INVALID_HTTP_TOKEN }, - addCodeToName, + captureLargerStackTrace, getMessage, - hideStackFrames + hideStackFrames, + kIsNodeError, } = require('internal/errors'); const kSensitiveHeaders = Symbol('nodejs.http2.sensitiveHeaders'); @@ -550,7 +552,13 @@ class NghttpError extends Error { binding.nghttp2ErrorString(integerCode)); this.code = customErrorCode || 'ERR_HTTP2_ERROR'; this.errno = integerCode; - addCodeToName(this, super.name, this.code); + captureLargerStackTrace(this); + ObjectDefineProperty(this, kIsNodeError, { + value: true, + enumerable: false, + writable: false, + configurable: true, + }); } toString() { diff --git a/lib/internal/source_map/prepare_stack_trace.js b/lib/internal/source_map/prepare_stack_trace.js index 6b8d4e566ff1b1..9502cfef6fe029 100644 --- a/lib/internal/source_map/prepare_stack_trace.js +++ b/lib/internal/source_map/prepare_stack_trace.js @@ -21,7 +21,8 @@ const { findSourceMap } = require('internal/source_map/source_map_cache'); const { kNoOverride, overrideStackTrace, - maybeOverridePrepareStackTrace + maybeOverridePrepareStackTrace, + kIsNodeError, } = require('internal/errors'); const { fileURLToPath } = require('internal/url'); @@ -41,7 +42,12 @@ const prepareStackTrace = (globalThis, error, trace) => { maybeOverridePrepareStackTrace(globalThis, error, trace); if (globalOverride !== kNoOverride) return globalOverride; - const errorString = ErrorPrototypeToString(error); + let errorString; + if (kIsNodeError in error) { + errorString = `${error.name} [${error.code}]: ${error.message}`; + } else { + errorString = ErrorPrototypeToString(error); + } if (trace.length === 0) { return 
errorString; diff --git a/test/message/esm_loader_not_found.out b/test/message/esm_loader_not_found.out index d2329d7c77ad86..61b1623cdf176f 100644 --- a/test/message/esm_loader_not_found.out +++ b/test/message/esm_loader_not_found.out @@ -1,8 +1,8 @@ (node:*) ExperimentalWarning: --experimental-loader is an experimental feature. This feature could change at any time (Use `* --trace-warnings ...` to show where the warning was created) -node:internal/process/esm_loader:* - internalBinding('errors').triggerUncaughtException( - ^ +node:internal/errors:* + ErrorCaptureStackTrace(err); + ^ Error [ERR_MODULE_NOT_FOUND]: Cannot find package 'i-dont-exist' imported from * at new NodeError (node:internal/errors:*:*) at packageResolve (node:internal/modules/esm/resolve:*:*) diff --git a/test/parallel/test-repl-top-level-await.js b/test/parallel/test-repl-top-level-await.js index 319633838bd358..88446a3a447f37 100644 --- a/test/parallel/test-repl-top-level-await.js +++ b/test/parallel/test-repl-top-level-await.js @@ -175,15 +175,17 @@ async function ordinaryTests() { async function ctrlCTest() { console.log('Testing Ctrl+C'); - assert.deepStrictEqual(await runAndWait([ + const output = await runAndWait([ 'await new Promise(() => {})', { ctrl: true, name: 'c' }, - ]), [ + ]); + assert.deepStrictEqual(output.slice(0, 3), [ 'await new Promise(() => {})\r', 'Uncaught:', - '[Error [ERR_SCRIPT_EXECUTION_INTERRUPTED]: ' + - 'Script execution was interrupted by `SIGINT`] {', - " code: 'ERR_SCRIPT_EXECUTION_INTERRUPTED'", + 'Error [ERR_SCRIPT_EXECUTION_INTERRUPTED]: ' + + 'Script execution was interrupted by `SIGINT`', + ]); + assert.deepStrictEqual(output.slice(-2), [ '}', PROMPT, ]); From b8860f35c9cdf87c855b414752c82b621c3c69b6 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sat, 3 Jul 2021 10:18:24 -0700 Subject: [PATCH 085/133] doc: remove GitHub mark Judging from https://github.com/logos, we are misusing the GitHub mark. 
That page indicates to not change the color (we change it from black to green). Less clear, it says "Use the Mark in social buttons to link to your GitHub profile or project" which isn't exactly what we're doing but also isn't not what we're doing? This might be an indication of my eyesight getting worse as I get older, but I think the size that we display the mark at by default makes it not entirely recognizable as the GitHub mark. Lastly, there's the philosophical issue of whether we should display a commercial entity's mark on our web pages in this particular instance. (For me, the answer is "maybe".) All told, I think we can remove it without damaging usability on the website, so let's do it. PR-URL: https://github.com/nodejs/node/pull/39251 Reviewed-By: Luigi Pinca Reviewed-By: James M Snell --- doc/api_assets/style.css | 5 ----- tools/doc/html.mjs | 4 +--- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/doc/api_assets/style.css b/doc/api_assets/style.css index c1f648eec9ddd3..b1c8a763a638aa 100644 --- a/doc/api_assets/style.css +++ b/doc/api_assets/style.css @@ -681,11 +681,6 @@ kbd { visibility: hidden; } -.github_icon { - vertical-align: middle; - margin: -2px 3px 0 0; -} - /* API reference sidebar */ @media only screen and (min-width: 1025px) { .apidoc #column2 > .line { diff --git a/tools/doc/html.mjs b/tools/doc/html.mjs index d1662a08c44924..c2c3a7a1836e88 100644 --- a/tools/doc/html.mjs +++ b/tools/doc/html.mjs @@ -489,8 +489,6 @@ function altDocs(filename, docCreated, versions) { ` : ''; } -// eslint-disable-next-line max-len -const githubLogo = ''; function editOnGitHub(filename) { - return `
  • ${githubLogo}Edit on GitHub
  • `; + return `
  • Edit on GitHub
  • `; } From 8e77aa23f126ad10aeeb3288a1c45b5dd64ef663 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Mon, 5 Jul 2021 07:55:49 -0700 Subject: [PATCH 086/133] test: add test for debugger restart message issue Running "restart" in the debugger confusingly prints an out-of-date "Debugger listening on..." message before printing a second updated one. Refs: https://github.com/nodejs/node/issues/39272 PR-URL: https://github.com/nodejs/node/pull/39273 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell --- .../test-debugger-restart-message.js | 57 +++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 test/known_issues/test-debugger-restart-message.js diff --git a/test/known_issues/test-debugger-restart-message.js b/test/known_issues/test-debugger-restart-message.js new file mode 100644 index 00000000000000..478806effbb39c --- /dev/null +++ b/test/known_issues/test-debugger-restart-message.js @@ -0,0 +1,57 @@ +'use strict'; + +// Refs: https://github.com/nodejs/node/issues/39272 + +const common = require('../common'); + +const assert = require('assert'); + +// When this is moved out of known_issues, this skip can be removed. +if (common.isOSX) { + assert.fail('does not fail reliably on macOS in CI'); +} + +// When this is moved out of known_issues, this can be removed and replaced with +// the commented-out use of common.skipIfInspectorDisabled() below. +if (!process.features.inspector) { + assert.fail('Known issues test should fail, so if the inspector is disabled'); +} + +// Will need to uncomment this when moved out of known_issues. +// common.skipIfInspectorDisabled(); + +// This can be reduced to 2 or even 1 (and the loop removed) once the debugger +// is fixed. It's set higher to make sure that the error is tripped reliably +// in CI. On most systems, the error will be tripped on the first test, but +// on a few platforms in CI, it needs to be many times. 
+const RESTARTS = 16; + +const fixtures = require('../common/fixtures'); +const startCLI = require('../common/debugger'); + +// Using `restart` should result in only one "Connect/For help" message. +{ + const script = fixtures.path('debugger', 'three-lines.js'); + const cli = startCLI([script]); + + function onFatal(error) { + cli.quit(); + throw error; + } + + const listeningRegExp = /Debugger listening on/g; + + cli.waitForInitialBreak() + .then(() => cli.waitForPrompt()) + .then(() => { + assert.strictEqual(cli.output.match(listeningRegExp).length, 1); + }) + .then(async () => { + for (let i = 0; i < RESTARTS; i++) { + await cli.stepCommand('restart'); + assert.strictEqual(cli.output.match(listeningRegExp).length, 1); + } + }) + .then(() => cli.quit()) + .then(null, onFatal); +} From 81df9b1e92f067766f76a2f276a1b896bcd1d8b8 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sun, 4 Jul 2021 18:18:36 -0700 Subject: [PATCH 087/133] doc: update collaborator email address MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/39263 Reviewed-By: James M Snell Reviewed-By: Tobias Nießen --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 1ece184a4952c0..967640ef2a4b20 100644 --- a/README.md +++ b/README.md @@ -316,7 +316,7 @@ For information about the governance of the Node.js project, see * [gabrielschulhof](https://github.com/gabrielschulhof) - **Gabriel Schulhof** <gabrielschulhof@gmail.com> * [gdams](https://github.com/gdams) - -**George Adams** <george.adams@uk.ibm.com> (he/him) +**George Adams** <george.adams@microsoft.com> (he/him) * [geek](https://github.com/geek) - **Wyatt Preul** <wpreul@gmail.com> * [gengjiawen](https://github.com/gengjiawen) - From 3cd9f5e2986607915d7e4ef81f0832ce3072f06c Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sun, 4 Jul 2021 18:10:20 -0700 Subject: [PATCH 088/133] tools: add 
find-inactive-collaborators.js MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The plan is to eventually call this script with a scheduled GitHub Action that could automatically open pull requests to move collaborators to emeritus status after (for example) a year of inactivity. Sample run: ``` $ node tools/find-inactive-collaborators.mjs '30 months ago' 864 authors have made commits since 30 months ago. 101 landers have landed commits since 30 months ago. 146 reviewers have approved landed commits since 30 months ago. 109 collaborators currently in the project. Inactive collaborators: Thomas Watson $ ``` PR-URL: https://github.com/nodejs/node/pull/39262 Reviewed-By: James M Snell Reviewed-By: Tobias Nießen --- tools/find-inactive-collaborators.mjs | 95 +++++++++++++++++++++++++++ 1 file changed, 95 insertions(+) create mode 100755 tools/find-inactive-collaborators.mjs diff --git a/tools/find-inactive-collaborators.mjs b/tools/find-inactive-collaborators.mjs new file mode 100755 index 00000000000000..578adf05fb78c7 --- /dev/null +++ b/tools/find-inactive-collaborators.mjs @@ -0,0 +1,95 @@ +#!/usr/bin/env node + +// Identify inactive collaborators. "Inactive" is not quite right, as the things +// this checks for are not the entirety of collaborator activities. Still, it is +// a pretty good proxy. Feel free to suggest or implement further metrics. 
+ +import cp from 'node:child_process'; +import fs from 'node:fs'; +import readline from 'node:readline'; + +const SINCE = process.argv[2] || '6 months ago'; + +async function runGitCommand(cmd, mapFn) { + const childProcess = cp.spawn('/bin/sh', ['-c', cmd], { + cwd: new URL('..', import.meta.url), + encoding: 'utf8', + stdio: ['inherit', 'pipe', 'inherit'], + }); + const lines = readline.createInterface({ + input: childProcess.stdout, + }); + const errorHandler = new Promise( + (_, reject) => childProcess.on('error', reject) + ); + const returnedSet = new Set(); + await Promise.race([errorHandler, Promise.resolve()]); + for await (const line of lines) { + await Promise.race([errorHandler, Promise.resolve()]); + const val = mapFn(line); + if (val) { + returnedSet.add(val); + } + } + return Promise.race([errorHandler, Promise.resolve(returnedSet)]); +} + +// Get all commit authors during the time period. +const authors = await runGitCommand( + `git shortlog -n -s --since="${SINCE}"`, + (line) => line.trim().split('\t', 2)[1] +); + +// Get all commit landers during the time period. +const landers = await runGitCommand( + `git shortlog -n -s -c --since="${SINCE}"`, + (line) => line.trim().split('\t', 2)[1] +); + +// Get all approving reviewers of landed commits during the time period. 
+const approvingReviewers = await runGitCommand( + `git log --since="${SINCE}" | egrep "^ Reviewed-By: "`, + (line) => /^ Reviewed-By: ([^<]+)/.exec(line)[1].trim() +); + +async function retrieveCollaboratorsFromReadme() { + const readmeText = readline.createInterface({ + input: fs.createReadStream(new URL('../README.md', import.meta.url)), + crlfDelay: Infinity, + }); + const returnedArray = []; + let processingCollaborators = false; + for await (const line of readmeText) { + const isCollaborator = processingCollaborators && line.length; + if (line === '### Collaborators') { + processingCollaborators = true; + } + if (line === '### Collaborator emeriti') { + processingCollaborators = false; + break; + } + if (line.startsWith('**') && isCollaborator) { + returnedArray.push(line.split('**', 2)[1].trim()); + } + } + return returnedArray; +} + +// Get list of current collaborators from README.md. +const collaborators = await retrieveCollaboratorsFromReadme(); + +console.log(`${authors.size.toLocaleString()} authors have made commits since ${SINCE}.`); +console.log(`${landers.size.toLocaleString()} landers have landed commits since ${SINCE}.`); +console.log(`${approvingReviewers.size.toLocaleString()} reviewers have approved landed commits since ${SINCE}.`); +console.log(`${collaborators.length.toLocaleString()} collaborators currently in the project.`); + +const inactive = collaborators.filter((collaborator) => + !authors.has(collaborator) && + !landers.has(collaborator) && + !approvingReviewers.has(collaborator) +); + +if (inactive.length) { + console.log('\nInactive collaborators:'); + console.log(inactive.join('\n')); +} From c92b80e63178932496bed9fe7aa0b01e7a320a60 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Tue, 6 Jul 2021 06:12:50 -0700 Subject: [PATCH 089/133] doc: use "repository" instead of "repo" in onboarding.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/39286 
Reviewed-By: Tobias Nießen Reviewed-By: Luigi Pinca Reviewed-By: Michaël Zasso Reviewed-By: Evan Lucas Reviewed-By: James M Snell --- onboarding.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/onboarding.md b/onboarding.md index 52104b9fb26128..fddc6aa64f16b3 100644 --- a/onboarding.md +++ b/onboarding.md @@ -53,8 +53,8 @@ onboarding session. * Notifications: * Use [https://github.com/notifications](https://github.com/notifications) or set up email - * Watching the main repo will flood your inbox (several hundred notifications - on typical weekdays), so be prepared + * Watching the main repository will flood your inbox (several hundred + notifications on typical weekdays), so be prepared The project has two venues for real-time discussion: * [`#nodejs-dev`](https://openjs-foundation.slack.com/archives/C019Y2T6STH) on @@ -179,8 +179,8 @@ The project has two venues for real-time discussion: `7006` in the `PR_ID`. * The remaining elements on the form are typically unchanged. * If you need help with something CI-related: - * Use the [Build WG repo](https://github.com/nodejs/build) to file issues - for the Build WG members who maintain the CI infrastructure. + * Use the [Build WG repository](https://github.com/nodejs/build) to file + issues for the Build WG members who maintain the CI infrastructure. 
## Landing PRs From 131d676f64047e5d059e63a3678c73a9f3271927 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Tue, 6 Jul 2021 06:25:51 -0700 Subject: [PATCH 090/133] doc: simplify CRAN mirror text in benchmark guide MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/39287 Reviewed-By: Michaël Zasso Reviewed-By: Luigi Pinca Reviewed-By: Tobias Nießen Reviewed-By: Colin Ihrig Reviewed-By: James M Snell --- doc/guides/writing-and-running-benchmarks.md | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/doc/guides/writing-and-running-benchmarks.md b/doc/guides/writing-and-running-benchmarks.md index 7e00e65a7b8b67..1198abe19bf921 100644 --- a/doc/guides/writing-and-running-benchmarks.md +++ b/doc/guides/writing-and-running-benchmarks.md @@ -74,11 +74,8 @@ install.packages("ggplot2") install.packages("plyr") ``` -In the event that a message is reported stating that a CRAN mirror must be -selected first, specify a mirror by adding in the repo parameter. - -If we used the "" mirror, it could look something -like this: +If a message states that a CRAN mirror must be selected first, specify a mirror +with the `repo` parameter. ```r install.packages("ggplot2", repo="http://cran.us.r-project.org") From 2d552a32d67e3c96562fd328ac45f15ac5bd9ae6 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Thu, 8 Jul 2021 06:46:16 -0700 Subject: [PATCH 091/133] doc: move ofrobots to collaborator emeritus MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Ali indicated in email that it makes sense to move him to collaborator emeritus at this time. 
PR-URL: https://github.com/nodejs/node/pull/39307 Reviewed-By: Richard Lau Reviewed-By: James M Snell Reviewed-By: Gireesh Punathil Reviewed-By: Gerhard Stöbich --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 967640ef2a4b20..404b8336ffa63f 100644 --- a/README.md +++ b/README.md @@ -387,8 +387,6 @@ For information about the governance of the Node.js project, see **Brian White** <mscdex@mscdex.net> * [MylesBorins](https://github.com/MylesBorins) - **Myles Borins** <myles.borins@gmail.com> (he/him) -* [ofrobots](https://github.com/ofrobots) - -**Ali Ijaz Sheikh** <ofrobots@google.com> (he/him) * [oyyd](https://github.com/oyyd) - **Ouyang Yadong** <oyydoibh@gmail.com> (he/him) * [panva](https://github.com/panva) - @@ -536,6 +534,8 @@ For information about the governance of the Node.js project, see **Chen Gang** <gangc.cxy@foxmail.com> * [not-an-aardvark](https://github.com/not-an-aardvark) - **Teddy Katz** <teddy.katz@gmail.com> (he/him) +* [ofrobots](https://github.com/ofrobots) - +**Ali Ijaz Sheikh** <ofrobots@google.com> (he/him) * [Olegas](https://github.com/Olegas) - **Oleg Elifantiev** <oleg@elifantiev.ru> * [orangemocha](https://github.com/orangemocha) - From a101fe68adea9da65c42ad4ff2c99c6c3d1078b9 Mon Sep 17 00:00:00 2001 From: ejose19 <8742215+ejose19@users.noreply.github.com> Date: Sun, 4 Jul 2021 23:42:06 -0300 Subject: [PATCH 092/133] repl: correctly hoist top level await declarations PR-URL: https://github.com/nodejs/node/pull/39265 Reviewed-By: Guy Bedford Reviewed-By: James M Snell --- lib/internal/repl/await.js | 94 ++++++++++++++++--- .../test-repl-preprocess-top-level-await.js | 77 ++++++++++++--- 2 files changed, 147 insertions(+), 24 deletions(-) diff --git a/lib/internal/repl/await.js b/lib/internal/repl/await.js index 09547117a6565a..3d0caa17650a04 100644 --- a/lib/internal/repl/await.js +++ b/lib/internal/repl/await.js @@ -3,6 +3,7 @@ const { ArrayFrom, ArrayPrototypeForEach, + 
ArrayPrototypeIncludes, ArrayPrototypeJoin, ArrayPrototypePop, ArrayPrototypePush, @@ -22,12 +23,21 @@ const parser = require('internal/deps/acorn/acorn/dist/acorn').Parser; const walk = require('internal/deps/acorn/acorn-walk/dist/walk'); const { Recoverable } = require('internal/repl'); +function isTopLevelDeclaration(state) { + return state.ancestors[state.ancestors.length - 2] === state.body; +} + const noop = FunctionPrototype; const visitorsWithoutAncestors = { ClassDeclaration(node, state, c) { - if (state.ancestors[state.ancestors.length - 2] === state.body) { + if (isTopLevelDeclaration(state)) { state.prepend(node, `${node.id.name}=`); + ArrayPrototypePush( + state.hoistedDeclarationStatements, + `let ${node.id.name}; ` + ); } + walk.base.ClassDeclaration(node, state, c); }, ForOfStatement(node, state, c) { @@ -38,6 +48,10 @@ const visitorsWithoutAncestors = { }, FunctionDeclaration(node, state, c) { state.prepend(node, `${node.id.name}=`); + ArrayPrototypePush( + state.hoistedDeclarationStatements, + `var ${node.id.name}; ` + ); }, FunctionExpression: noop, ArrowFunctionExpression: noop, @@ -51,22 +65,72 @@ const visitorsWithoutAncestors = { walk.base.ReturnStatement(node, state, c); }, VariableDeclaration(node, state, c) { - if (node.kind === 'var' || - state.ancestors[state.ancestors.length - 2] === state.body) { - if (node.declarations.length === 1) { - state.replace(node.start, node.start + node.kind.length, 'void'); - } else { - state.replace(node.start, node.start + node.kind.length, 'void ('); + const variableKind = node.kind; + const isIterableForDeclaration = ArrayPrototypeIncludes( + ['ForOfStatement', 'ForInStatement'], + state.ancestors[state.ancestors.length - 2].type + ); + + if (variableKind === 'var' || isTopLevelDeclaration(state)) { + state.replace( + node.start, + node.start + variableKind.length + (isIterableForDeclaration ? 1 : 0), + variableKind === 'var' && isIterableForDeclaration ? 
+ '' : + 'void' + (node.declarations.length === 1 ? '' : ' (') + ); + + if (!isIterableForDeclaration) { + ArrayPrototypeForEach(node.declarations, (decl) => { + state.prepend(decl, '('); + state.append(decl, decl.init ? ')' : '=undefined)'); + }); + + if (node.declarations.length !== 1) { + state.append(node.declarations[node.declarations.length - 1], ')'); + } + } + + const variableIdentifiersToHoist = [ + ['var', []], + ['let', []], + ]; + function registerVariableDeclarationIdentifiers(node) { + switch (node.type) { + case 'Identifier': + ArrayPrototypePush( + variableIdentifiersToHoist[variableKind === 'var' ? 0 : 1][1], + node.name + ); + break; + case 'ObjectPattern': + ArrayPrototypeForEach(node.properties, (property) => { + registerVariableDeclarationIdentifiers(property.value); + }); + break; + case 'ArrayPattern': + ArrayPrototypeForEach(node.elements, (element) => { + registerVariableDeclarationIdentifiers(element); + }); + break; + } } ArrayPrototypeForEach(node.declarations, (decl) => { - state.prepend(decl, '('); - state.append(decl, decl.init ? 
')' : '=undefined)'); + registerVariableDeclarationIdentifiers(decl.id); }); - if (node.declarations.length !== 1) { - state.append(node.declarations[node.declarations.length - 1], ')'); - } + ArrayPrototypeForEach( + variableIdentifiersToHoist, + ({ 0: kind, 1: identifiers }) => { + if (identifiers.length > 0) { + ArrayPrototypePush( + state.hoistedDeclarationStatements, + `${kind} ${ArrayPrototypeJoin(identifiers, ', ')}; ` + ); + } + } + ); } walk.base.VariableDeclaration(node, state, c); @@ -128,6 +192,7 @@ function processTopLevelAwait(src) { const state = { body, ancestors: [], + hoistedDeclarationStatements: [], replace(from, to, str) { for (let i = from; i < to; i++) { wrappedArray[i] = ''; @@ -172,7 +237,10 @@ function processTopLevelAwait(src) { state.append(last.expression, ')'); } - return ArrayPrototypeJoin(wrappedArray, ''); + return ( + ArrayPrototypeJoin(state.hoistedDeclarationStatements, '') + + ArrayPrototypeJoin(wrappedArray, '') + ); } module.exports = { diff --git a/test/parallel/test-repl-preprocess-top-level-await.js b/test/parallel/test-repl-preprocess-top-level-await.js index ed1fe90e43e459..3ec4da7e8fb72f 100644 --- a/test/parallel/test-repl-preprocess-top-level-await.js +++ b/test/parallel/test-repl-preprocess-top-level-await.js @@ -29,38 +29,93 @@ const testCases = [ [ 'await 0; return 0;', null ], [ 'var a = await 1', - '(async () => { void (a = await 1) })()' ], + 'var a; (async () => { void (a = await 1) })()' ], [ 'let a = await 1', - '(async () => { void (a = await 1) })()' ], + 'let a; (async () => { void (a = await 1) })()' ], [ 'const a = await 1', - '(async () => { void (a = await 1) })()' ], + 'let a; (async () => { void (a = await 1) })()' ], [ 'for (var i = 0; i < 1; ++i) { await i }', - '(async () => { for (void (i = 0); i < 1; ++i) { await i } })()' ], + 'var i; (async () => { for (void (i = 0); i < 1; ++i) { await i } })()' ], [ 'for (let i = 0; i < 1; ++i) { await i }', '(async () => { for (let i = 0; i < 1; ++i) { await 
i } })()' ], [ 'var {a} = {a:1}, [b] = [1], {c:{d}} = {c:{d: await 1}}', - '(async () => { void ( ({a} = {a:1}), ([b] = [1]), ' + + 'var a, b, d; (async () => { void ( ({a} = {a:1}), ([b] = [1]), ' + '({c:{d}} = {c:{d: await 1}})) })()' ], + [ 'let [a, b, c] = await ([1, 2, 3])', + 'let a, b, c; (async () => { void ([a, b, c] = await ([1, 2, 3])) })()'], + [ 'let {a,b,c} = await ({a: 1, b: 2, c: 3})', + 'let a, b, c; (async () => { void ({a,b,c} = ' + + 'await ({a: 1, b: 2, c: 3})) })()'], + [ 'let {a: [b]} = {a: [await 1]}, [{d}] = [{d: 3}]', + 'let b, d; (async () => { void ( ({a: [b]} = {a: [await 1]}),' + + ' ([{d}] = [{d: 3}])) })()'], /* eslint-disable no-template-curly-in-string */ [ 'console.log(`${(await { a: 1 }).a}`)', '(async () => { return (console.log(`${(await { a: 1 }).a}`)) })()' ], /* eslint-enable no-template-curly-in-string */ [ 'await 0; function foo() {}', - '(async () => { await 0; foo=function foo() {} })()' ], + 'var foo; (async () => { await 0; foo=function foo() {} })()' ], [ 'await 0; class Foo {}', - '(async () => { await 0; Foo=class Foo {} })()' ], + 'let Foo; (async () => { await 0; Foo=class Foo {} })()' ], [ 'if (await true) { function foo() {} }', - '(async () => { if (await true) { foo=function foo() {} } })()' ], + 'var foo; (async () => { if (await true) { foo=function foo() {} } })()' ], [ 'if (await true) { class Foo{} }', '(async () => { if (await true) { class Foo{} } })()' ], [ 'if (await true) { var a = 1; }', - '(async () => { if (await true) { void (a = 1); } })()' ], + 'var a; (async () => { if (await true) { void (a = 1); } })()' ], [ 'if (await true) { let a = 1; }', '(async () => { if (await true) { let a = 1; } })()' ], [ 'var a = await 1; let b = 2; const c = 3;', - '(async () => { void (a = await 1); void (b = 2); void (c = 3); })()' ], + 'var a; let b; let c; (async () => { void (a = await 1); void (b = 2);' + + ' void (c = 3); })()' ], [ 'let o = await 1, p', - '(async () => { void ( (o = await 1), 
(p=undefined)) })()' ], + 'let o, p; (async () => { void ( (o = await 1), (p=undefined)) })()' ], + [ 'await (async () => { let p = await 1; return p; })()', + '(async () => { return (await (async () => ' + + '{ let p = await 1; return p; })()) })()' ], + [ '{ let p = await 1; }', + '(async () => { { let p = await 1; } })()' ], + [ 'var p = await 1', + 'var p; (async () => { void (p = await 1) })()' ], + [ 'await (async () => { var p = await 1; return p; })()', + '(async () => { return (await (async () => ' + + '{ var p = await 1; return p; })()) })()' ], + [ '{ var p = await 1; }', + 'var p; (async () => { { void (p = await 1); } })()' ], + [ 'for await (var i of asyncIterable) { i; }', + 'var i; (async () => { for await (i of asyncIterable) { i; } })()'], + [ 'for await (var [i] of asyncIterable) { i; }', + 'var i; (async () => { for await ([i] of asyncIterable) { i; } })()'], + [ 'for await (var {i} of asyncIterable) { i; }', + 'var i; (async () => { for await ({i} of asyncIterable) { i; } })()'], + [ 'for await (var [{i}, [j]] of asyncIterable) { i; }', + 'var i, j; (async () => { for await ([{i}, [j]] of asyncIterable)' + + ' { i; } })()'], + [ 'for await (let i of asyncIterable) { i; }', + '(async () => { for await (let i of asyncIterable) { i; } })()'], + [ 'for await (const i of asyncIterable) { i; }', + '(async () => { for await (const i of asyncIterable) { i; } })()'], + [ 'for (var i of [1,2,3]) { await 1; }', + 'var i; (async () => { for (i of [1,2,3]) { await 1; } })()'], + [ 'for (var [i] of [[1], [2]]) { await 1; }', + 'var i; (async () => { for ([i] of [[1], [2]]) { await 1; } })()'], + [ 'for (var {i} of [{i: 1}, {i: 2}]) { await 1; }', + 'var i; (async () => { for ({i} of [{i: 1}, {i: 2}]) { await 1; } })()'], + [ 'for (var [{i}, [j]] of [[{i: 1}, [2]]]) { await 1; }', + 'var i, j; (async () => { for ([{i}, [j]] of [[{i: 1}, [2]]])' + + ' { await 1; } })()'], + [ 'for (let i of [1,2,3]) { await 1; }', + '(async () => { for (let i of [1,2,3]) { 
await 1; } })()'], + [ 'for (const i of [1,2,3]) { await 1; }', + '(async () => { for (const i of [1,2,3]) { await 1; } })()'], + [ 'for (var i in {x:1}) { await 1 }', + 'var i; (async () => { for (i in {x:1}) { await 1 } })()'], + [ 'for (var [a,b] in {xy:1}) { await 1 }', + 'var a, b; (async () => { for ([a,b] in {xy:1}) { await 1 } })()'], + [ 'for (let i in {x:1}) { await 1 }', + '(async () => { for (let i in {x:1}) { await 1 } })()'], + [ 'for (const i in {x:1}) { await 1 }', + '(async () => { for (const i in {x:1}) { await 1 } })()'], ]; for (const [input, expected] of testCases) { From b168ec2a2a3a4bc3ae19f04e6b99957f3283b315 Mon Sep 17 00:00:00 2001 From: ejose19 <8742215+ejose19@users.noreply.github.com> Date: Tue, 6 Jul 2021 19:34:36 -0300 Subject: [PATCH 093/133] repl: processTopLevelAwait fallback error handling PR-URL: https://github.com/nodejs/node/pull/39290 Reviewed-By: Guy Bedford Reviewed-By: James M Snell --- lib/internal/repl/await.js | 5 ++- lib/repl.js | 36 ++++++++++++++++++++-- test/parallel/test-repl-top-level-await.js | 30 ++++++++++++++++++ 3 files changed, 68 insertions(+), 3 deletions(-) diff --git a/lib/internal/repl/await.js b/lib/internal/repl/await.js index 3d0caa17650a04..e36fa5bfd735b1 100644 --- a/lib/internal/repl/await.js +++ b/lib/internal/repl/await.js @@ -184,7 +184,10 @@ function processTopLevelAwait(src) { '^\n\n' + RegExpPrototypeSymbolReplace(/ \([^)]+\)/, e.message, ''); // V8 unexpected token errors include the token string. if (StringPrototypeEndsWith(message, 'Unexpected token')) - message += " '" + src[e.pos - wrapPrefix.length] + "'"; + message += " '" + + // Wrapper end may cause acorn to report error position after the source + (src[e.pos - wrapPrefix.length] ?? 
src[src.length - 1]) + + "'"; // eslint-disable-next-line no-restricted-syntax throw new SyntaxError(message); } diff --git a/lib/repl.js b/lib/repl.js index 74fee4c9434129..c73ad5d9d61ade 100644 --- a/lib/repl.js +++ b/lib/repl.js @@ -78,6 +78,7 @@ const { ReflectApply, RegExp, RegExpPrototypeExec, + RegExpPrototypeSymbolReplace, RegExpPrototypeTest, SafeSet, SafeWeakSet, @@ -434,8 +435,39 @@ function REPLServer(prompt, awaitPromise = true; } } catch (e) { - decorateErrorStack(e); - err = e; + let recoverableError = false; + if (e.name === 'SyntaxError') { + let parentURL; + try { + const { pathToFileURL } = require('url'); + // Adding `/repl` prevents dynamic imports from loading relative + // to the parent of `process.cwd()`. + parentURL = pathToFileURL(path.join(process.cwd(), 'repl')).href; + } catch { + } + + // Remove all "await"s and attempt running the script + // in order to detect if error is truly non recoverable + const fallbackCode = RegExpPrototypeSymbolReplace(/\bawait\b/g, code, ''); + try { + vm.createScript(fallbackCode, { + filename: file, + displayErrors: true, + importModuleDynamically: async (specifier) => { + return asyncESM.ESMLoader.import(specifier, parentURL); + } + }); + } catch (fallbackError) { + if (isRecoverableError(fallbackError, fallbackCode)) { + recoverableError = true; + err = new Recoverable(e); + } + } + } + if (!recoverableError) { + decorateErrorStack(e); + err = e; + } } } diff --git a/test/parallel/test-repl-top-level-await.js b/test/parallel/test-repl-top-level-await.js index 88446a3a447f37..1388ce9334c883 100644 --- a/test/parallel/test-repl-top-level-await.js +++ b/test/parallel/test-repl-top-level-await.js @@ -152,6 +152,36 @@ async function ordinaryTests() { 'Unexpected token \'.\'', ], ], + ['for (const x of [1,2,3]) {\nawait x\n}', [ + 'for (const x of [1,2,3]) {\r', + '... await x\r', + '... }\r', + 'undefined', + ]], + ['for (const x of [1,2,3]) {\nawait x;\n}', [ + 'for (const x of [1,2,3]) {\r', + '... 
await x;\r', + '... }\r', + 'undefined', + ]], + ['for await (const x of [1,2,3]) {\nconsole.log(x)\n}', [ + 'for await (const x of [1,2,3]) {\r', + '... console.log(x)\r', + '... }\r', + '1', + '2', + '3', + 'undefined', + ]], + ['for await (const x of [1,2,3]) {\nconsole.log(x);\n}', [ + 'for await (const x of [1,2,3]) {\r', + '... console.log(x);\r', + '... }\r', + '1', + '2', + '3', + 'undefined', + ]], ]; for (const [input, expected = [`${input}\r`], options = {}] of testCases) { From b56a3d9009e4cab5a9a91420f1ab0aa2bdd9db23 Mon Sep 17 00:00:00 2001 From: Jiawen Geng Date: Wed, 7 Jul 2021 01:53:09 +0000 Subject: [PATCH 094/133] tools: update gyp-next to v0.9.3 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/39291 Reviewed-By: Michaël Zasso Reviewed-By: Richard Lau Reviewed-By: Colin Ihrig --- tools/gyp/CHANGELOG.md | 14 ++++++++++++++ tools/gyp/pylib/gyp/MSVSUtil.py | 2 +- tools/gyp/pylib/gyp/common.py | 2 +- tools/gyp/pylib/gyp/easy_xml.py | 11 ++++++++--- tools/gyp/pylib/gyp/generator/android.py | 6 +++--- tools/gyp/pylib/gyp/generator/make.py | 2 +- tools/gyp/pylib/gyp/generator/msvs.py | 6 +++--- tools/gyp/pylib/gyp/generator/ninja.py | 3 +-- tools/gyp/pylib/gyp/input.py | 2 +- tools/gyp/pylib/gyp/msvs_emulation.py | 4 ++-- tools/gyp/pylib/gyp/win_tool.py | 5 +++-- tools/gyp/pylib/gyp/xcodeproj_file.py | 8 ++++---- tools/gyp/setup.py | 2 +- tools/gyp/test_gyp.py | 21 +++++++-------------- 14 files changed, 50 insertions(+), 38 deletions(-) diff --git a/tools/gyp/CHANGELOG.md b/tools/gyp/CHANGELOG.md index 6d66b3acd22d5a..d84ee08238d37f 100644 --- a/tools/gyp/CHANGELOG.md +++ b/tools/gyp/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +### [0.9.3](https://www.github.com/nodejs/gyp-next/compare/v0.9.2...v0.9.3) (2021-07-07) + + +### Bug Fixes + +* build failure with ninja and Python 3 on Windows ([#113](https://www.github.com/nodejs/gyp-next/issues/113)) 
([c172d10](https://www.github.com/nodejs/gyp-next/commit/c172d105deff5db4244e583942215918fa80dd3c)) + +### [0.9.2](https://www.github.com/nodejs/gyp-next/compare/v0.9.1...v0.9.2) (2021-05-21) + + +### Bug Fixes + +* add support of utf8 encoding ([#105](https://www.github.com/nodejs/gyp-next/issues/105)) ([4d0f93c](https://www.github.com/nodejs/gyp-next/commit/4d0f93c249286d1f0c0f665f5fe7346119f98cf1)) + ### [0.9.1](https://www.github.com/nodejs/gyp-next/compare/v0.9.0...v0.9.1) (2021-05-14) diff --git a/tools/gyp/pylib/gyp/MSVSUtil.py b/tools/gyp/pylib/gyp/MSVSUtil.py index cb55305eaeed24..36bb782bd319a2 100644 --- a/tools/gyp/pylib/gyp/MSVSUtil.py +++ b/tools/gyp/pylib/gyp/MSVSUtil.py @@ -55,7 +55,7 @@ def _SuffixName(name, suffix): Target name with suffix added (foo_suffix#target) """ parts = name.rsplit("#", 1) - parts[0] = "{}_{}".format(parts[0], suffix) + parts[0] = f"{parts[0]}_{suffix}" return "#".join(parts) diff --git a/tools/gyp/pylib/gyp/common.py b/tools/gyp/pylib/gyp/common.py index ba310ce247f078..9213fcc5e82bb7 100644 --- a/tools/gyp/pylib/gyp/common.py +++ b/tools/gyp/pylib/gyp/common.py @@ -562,7 +562,7 @@ def pop(self, last=True): # pylint: disable=W0221 def __repr__(self): if not self: return f"{self.__class__.__name__}()" - return "{}({!r})".format(self.__class__.__name__, list(self)) + return f"{self.__class__.__name__}({list(self)!r})" def __eq__(self, other): if isinstance(other, OrderedSet): diff --git a/tools/gyp/pylib/gyp/easy_xml.py b/tools/gyp/pylib/gyp/easy_xml.py index e475b5530c1434..0c99e29ecf8ce0 100644 --- a/tools/gyp/pylib/gyp/easy_xml.py +++ b/tools/gyp/pylib/gyp/easy_xml.py @@ -2,6 +2,7 @@ # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. 
+import sys import re import os import locale @@ -84,7 +85,7 @@ def _ConstructContentList(xml_parts, specification, pretty, level=0): rest = specification[1:] if rest and isinstance(rest[0], dict): for at, val in sorted(rest[0].items()): - xml_parts.append(' {}="{}"'.format(at, _XmlEscape(val, attr=True))) + xml_parts.append(f' {at}="{_XmlEscape(val, attr=True)}"') rest = rest[1:] if rest: xml_parts.append(">") @@ -106,7 +107,8 @@ def _ConstructContentList(xml_parts, specification, pretty, level=0): xml_parts.append("/>%s" % new_line) -def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False, win32=False): +def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False, + win32=(sys.platform == "win32")): """ Writes the XML content to disk, touching the file only if it has changed. Args: @@ -121,7 +123,10 @@ def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False, win32=False default_encoding = locale.getdefaultlocale()[1] if default_encoding and default_encoding.upper() != encoding.upper(): - xml_string = xml_string.encode(encoding) + if win32 and sys.version_info < (3, 7): + xml_string = xml_string.decode("cp1251").encode(encoding) + else: + xml_string = xml_string.encode(encoding) # Get the old content try: diff --git a/tools/gyp/pylib/gyp/generator/android.py b/tools/gyp/pylib/gyp/generator/android.py index 040d8088a24a03..cdf1a4832cf1ad 100644 --- a/tools/gyp/pylib/gyp/generator/android.py +++ b/tools/gyp/pylib/gyp/generator/android.py @@ -349,7 +349,7 @@ def WriteActions(self, actions, extra_sources, extra_outputs): for output in outputs[1:]: # Make each output depend on the main output, with an empty command # to force make to notice that the mtime has changed. 
- self.WriteLn("{}: {} ;".format(self.LocalPathify(output), main_output)) + self.WriteLn(f"{self.LocalPathify(output)}: {main_output} ;") extra_outputs += outputs self.WriteLn() @@ -616,7 +616,7 @@ def WriteSources(self, spec, configs, extra_sources): if IsCPPExtension(ext) and ext != local_cpp_extension: local_file = root + local_cpp_extension if local_file != source: - self.WriteLn("{}: {}".format(local_file, self.LocalPathify(source))) + self.WriteLn(f"{local_file}: {self.LocalPathify(source)}") self.WriteLn("\tmkdir -p $(@D); cp $< $@") origin_src_dirs.append(os.path.dirname(source)) final_generated_sources.append(local_file) @@ -908,7 +908,7 @@ def WriteTarget( if isinstance(v, list): self.WriteList(v, k) else: - self.WriteLn("{} := {}".format(k, make.QuoteIfNecessary(v))) + self.WriteLn(f"{k} := {make.QuoteIfNecessary(v)}") self.WriteLn("") # Add to the set of targets which represent the gyp 'all' target. We use the diff --git a/tools/gyp/pylib/gyp/generator/make.py b/tools/gyp/pylib/gyp/generator/make.py index eb9102dd15a810..c595f20fe2df12 100644 --- a/tools/gyp/pylib/gyp/generator/make.py +++ b/tools/gyp/pylib/gyp/generator/make.py @@ -2133,7 +2133,7 @@ def WriteSortedXcodeEnv(self, target, env): # export foo := a\ b # it does not -- the backslash is written to the env as literal character. # So don't escape spaces in |env[k]|. 
- self.WriteLn("{}: export {} := {}".format(QuoteSpaces(target), k, v)) + self.WriteLn(f"{QuoteSpaces(target)}: export {k} := {v}") def Objectify(self, path): """Convert a path to its output directory form.""" diff --git a/tools/gyp/pylib/gyp/generator/msvs.py b/tools/gyp/pylib/gyp/generator/msvs.py index 5435eb1e1f3317..31d5396fe56978 100644 --- a/tools/gyp/pylib/gyp/generator/msvs.py +++ b/tools/gyp/pylib/gyp/generator/msvs.py @@ -314,7 +314,7 @@ def _ConfigBaseName(config_name, platform_name): def _ConfigFullName(config_name, config_data): platform_name = _ConfigPlatform(config_data) - return "{}|{}".format(_ConfigBaseName(config_name, platform_name), platform_name) + return f"{_ConfigBaseName(config_name, platform_name)}|{platform_name}" def _ConfigWindowsTargetPlatformVersion(config_data, version): @@ -335,7 +335,7 @@ def _ConfigWindowsTargetPlatformVersion(config_data, version): # Find a matching entry in sdk_dir\include. expected_sdk_dir = r"%s\include" % sdk_dir names = sorted( - [ + ( x for x in ( os.listdir(expected_sdk_dir) @@ -343,7 +343,7 @@ def _ConfigWindowsTargetPlatformVersion(config_data, version): else [] ) if x.startswith(version) - ], + ), reverse=True, ) if names: diff --git a/tools/gyp/pylib/gyp/generator/ninja.py b/tools/gyp/pylib/gyp/generator/ninja.py index ca032aef20ff6a..b66e674a7b3871 100644 --- a/tools/gyp/pylib/gyp/generator/ninja.py +++ b/tools/gyp/pylib/gyp/generator/ninja.py @@ -638,7 +638,7 @@ def GenerateDescription(self, verb, message, fallback): if self.toolset != "target": verb += "(%s)" % self.toolset if message: - return "{} {}".format(verb, self.ExpandSpecial(message)) + return f"{verb} {self.ExpandSpecial(message)}" else: return f"{verb} {self.name}: {fallback}" @@ -2389,7 +2389,6 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name ) if flavor == "win": master_ninja.variable("ld_host", ld_host) - master_ninja.variable("ldxx_host", ldxx_host) else: master_ninja.variable( "ld_host", 
CommandWithWrapper("LINK", wrappers, ld_host) diff --git a/tools/gyp/pylib/gyp/input.py b/tools/gyp/pylib/gyp/input.py index ca7ce44eab87df..354958bfb2ab55 100644 --- a/tools/gyp/pylib/gyp/input.py +++ b/tools/gyp/pylib/gyp/input.py @@ -225,7 +225,7 @@ def LoadOneBuildFile(build_file_path, data, aux_data, includes, is_target, check return data[build_file_path] if os.path.exists(build_file_path): - build_file_contents = open(build_file_path).read() + build_file_contents = open(build_file_path, encoding='utf-8').read() else: raise GypError(f"{build_file_path} not found (cwd: {os.getcwd()})") diff --git a/tools/gyp/pylib/gyp/msvs_emulation.py b/tools/gyp/pylib/gyp/msvs_emulation.py index f744e38df1639b..6fcabd049d843c 100644 --- a/tools/gyp/pylib/gyp/msvs_emulation.py +++ b/tools/gyp/pylib/gyp/msvs_emulation.py @@ -333,7 +333,7 @@ def _TargetConfig(self, config): # first level is globally for the configuration (this is what we consider # "the" config at the gyp level, which will be something like 'Debug' or # 'Release'), VS2015 and later only use this level - if self.vs_version.short_name >= 2015: + if int(self.vs_version.short_name) >= 2015: return config # and a second target-specific configuration, which is an # override for the global one. |config| is remapped here to take into @@ -537,7 +537,7 @@ def GetCflags(self, config): ) ] ) - if self.vs_version.project_version >= 12.0: + if float(self.vs_version.project_version) >= 12.0: # New flag introduced in VS2013 (project version 12.0) Forces writes to # the program database (PDB) to be serialized through MSPDBSRV.EXE. # https://msdn.microsoft.com/en-us/library/dn502518.aspx diff --git a/tools/gyp/pylib/gyp/win_tool.py b/tools/gyp/pylib/gyp/win_tool.py index 4dbcda50a4c0c6..638eee40029411 100755 --- a/tools/gyp/pylib/gyp/win_tool.py +++ b/tools/gyp/pylib/gyp/win_tool.py @@ -221,8 +221,9 @@ def ExecLinkWithManifests( # and sometimes doesn't unfortunately. 
with open(our_manifest) as our_f: with open(assert_manifest) as assert_f: - our_data = our_f.read().translate(None, string.whitespace) - assert_data = assert_f.read().translate(None, string.whitespace) + translator = str.maketrans('', '', string.whitespace) + our_data = our_f.read().translate(translator) + assert_data = assert_f.read().translate(translator) if our_data != assert_data: os.unlink(out) diff --git a/tools/gyp/pylib/gyp/xcodeproj_file.py b/tools/gyp/pylib/gyp/xcodeproj_file.py index 5863ef45df2379..076eea37211179 100644 --- a/tools/gyp/pylib/gyp/xcodeproj_file.py +++ b/tools/gyp/pylib/gyp/xcodeproj_file.py @@ -299,8 +299,8 @@ def __repr__(self): try: name = self.Name() except NotImplementedError: - return "<{} at 0x{:x}>".format(self.__class__.__name__, id(self)) - return "<{} {!r} at 0x{:x}>".format(self.__class__.__name__, name, id(self)) + return f"<{self.__class__.__name__} at 0x{id(self):x}>" + return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>" def Copy(self): """Make a copy of this object. @@ -2251,7 +2251,7 @@ class PBXContainerItemProxy(XCObject): def __repr__(self): props = self._properties name = "{}.gyp:{}".format(props["containerPortal"].Name(), props["remoteInfo"]) - return "<{} {!r} at 0x{:x}>".format(self.__class__.__name__, name, id(self)) + return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>" def Name(self): # Admittedly not the best name, but it's what Xcode uses. @@ -2288,7 +2288,7 @@ class PBXTargetDependency(XCObject): def __repr__(self): name = self._properties.get("name") or self._properties["target"].Name() - return "<{} {!r} at 0x{:x}>".format(self.__class__.__name__, name, id(self)) + return f"<{self.__class__.__name__} {name!r} at 0x{id(self):x}>" def Name(self): # Admittedly not the best name, but it's what Xcode uses. 
diff --git a/tools/gyp/setup.py b/tools/gyp/setup.py index f4a9481937547a..1ff298a12326b0 100644 --- a/tools/gyp/setup.py +++ b/tools/gyp/setup.py @@ -15,7 +15,7 @@ setup( name="gyp-next", - version="0.9.1", + version="0.9.3", description="A fork of the GYP build system for use in the Node.js projects", long_description=long_description, long_description_content_type="text/markdown", diff --git a/tools/gyp/test_gyp.py b/tools/gyp/test_gyp.py index 757d2fc0b0a16f..9ba264170f43ab 100755 --- a/tools/gyp/test_gyp.py +++ b/tools/gyp/test_gyp.py @@ -140,10 +140,7 @@ def main(argv=None): if not args.quiet: runner.print_results() - if runner.failures: - return 1 - else: - return 0 + return 1 if runner.failures else 0 def print_configuration_info(): @@ -152,8 +149,8 @@ def print_configuration_info(): sys.path.append(os.path.abspath("test/lib")) import TestMac - print(" Mac {} {}".format(platform.mac_ver()[0], platform.mac_ver()[2])) - print(" Xcode %s" % TestMac.Xcode.Version()) + print(f" Mac {platform.mac_ver()[0]} {platform.mac_ver()[2]}") + print(f" Xcode {TestMac.Xcode.Version()}") elif sys.platform == "win32": sys.path.append(os.path.abspath("pylib")) import gyp.MSVSVersion @@ -162,8 +159,8 @@ def print_configuration_info(): print(" MSVS %s" % gyp.MSVSVersion.SelectVisualStudioVersion().Description()) elif sys.platform in ("linux", "linux2"): print(" Linux %s" % " ".join(platform.linux_distribution())) - print(" Python %s" % platform.python_version()) - print(" PYTHONPATH=%s" % os.environ["PYTHONPATH"]) + print(f" Python {platform.python_version()}") + print(f" PYTHONPATH={os.environ['PYTHONPATH']}") print() @@ -222,13 +219,9 @@ def run_test(self, test, fmt, i): res_msg = f" {res} {took:.3f}s" self.print_(res_msg) - if ( - stdout - and not stdout.endswith("PASSED\n") - and not (stdout.endswith("NO RESULT\n")) - ): + if stdout and not stdout.endswith(("PASSED\n", "NO RESULT\n")): print() - print("\n".join(" %s" % line for line in stdout.splitlines())) + 
print("\n".join(f" {line}" for line in stdout.splitlines())) elif not self.isatty: print() From d101a85e36dc798c654c2acdac230c60463324ec Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Thu, 8 Jul 2021 15:29:52 -0700 Subject: [PATCH 095/133] doc: move AndreasMadsen to emeritus In private email, Andreas Madsen indicated it would be OK to move him to emeritus. PR-URL: https://github.com/nodejs/node/pull/39315 Reviewed-By: James M Snell Reviewed-By: Colin Ihrig Reviewed-By: Gireesh Punathil --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 404b8336ffa63f..224842687736d9 100644 --- a/README.md +++ b/README.md @@ -251,8 +251,6 @@ For information about the governance of the Node.js project, see **Antoine du Hamel** <duhamelantoine1995@gmail.com> (he/him) * [ak239](https://github.com/ak239) - **Aleksei Koziatinskii** <ak239spb@gmail.com> -* [AndreasMadsen](https://github.com/AndreasMadsen) - -**Andreas Madsen** <amwebdk@gmail.com> (he/him) * [antsmartian](https://github.com/antsmartian) - **Anto Aravinth** <anto.aravinth.cse@gmail.com> (he/him) * [apapirovski](https://github.com/apapirovski) - @@ -466,6 +464,8 @@ For information about the governance of the Node.js project, see **Andras** <andras@kinvey.com> * [AnnaMag](https://github.com/AnnaMag) - **Anna M. Kedzierska** <anna.m.kedzierska@gmail.com> +* [AndreasMadsen](https://github.com/AndreasMadsen) - +**Andreas Madsen** <amwebdk@gmail.com> (he/him) * [aqrln](https://github.com/aqrln) - **Alexey Orlenko** <eaglexrlnk@gmail.com> (he/him) * [bnoordhuis](https://github.com/bnoordhuis) - From b3a0dd1e4a96264cb46a93724af7aa9d8d26d8c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Mon, 5 Jul 2021 15:49:20 +0200 Subject: [PATCH 096/133] tools: pass bot token to node-pr-labeler MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This should allow workflows that use the `labeled` event to be run. 
PR-URL: https://github.com/nodejs/node/pull/39271 Reviewed-By: James M Snell Reviewed-By: Jiawen Geng Reviewed-By: Tobias Nießen --- .github/workflows/label-pr.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/label-pr.yml b/.github/workflows/label-pr.yml index 4e56676e41e5ff..58e9b226dab0d0 100644 --- a/.github/workflows/label-pr.yml +++ b/.github/workflows/label-pr.yml @@ -11,4 +11,5 @@ jobs: steps: - uses: nodejs/node-pr-labeler@v1 with: + repo-token: ${{ secrets.GH_USER_TOKEN }} configuration-path: .github/label-pr-config.yml From 11a8b81cafe0ddeca9a387b4599c3ec0be705838 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Wed, 7 Jul 2021 07:01:01 -0700 Subject: [PATCH 097/133] test: remove debugger workaround for AIX MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Optimistically removing workaround code in the debugger test tool. PR-URL: https://github.com/nodejs/node/pull/39296 Reviewed-By: Tobias Nießen Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Gireesh Punathil --- test/common/debugger.js | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/test/common/debugger.js b/test/common/debugger.js index c127b1dc292e7f..36d6328dcc9b39 100644 --- a/test/common/debugger.js +++ b/test/common/debugger.js @@ -23,10 +23,7 @@ function startCLI(args, flags = [], spawnOpts = {}) { if (this === child.stderr) { stderrOutput += chunk; } - // TODO(trott): Figure out why the "breakpoints restored." message appears - // in unpredictable places especially on AIX in CI. We shouldn't be - // excluding it, but it gets in the way of the output checking for tests. 
- outputBuffer.push(chunk.replace(/\n*\d+ breakpoints restored\.\n*/mg, '')); + outputBuffer.push(chunk); } function getOutput() { From 0ec93a1fc117433c131f0b3d0c3174cbf560b190 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Wed, 7 Jul 2021 09:45:04 -0700 Subject: [PATCH 098/133] test: use common.PORT instead of hardcoded port number PR-URL: https://github.com/nodejs/node/pull/39298 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell --- test/sequential/test-debugger-invalid-args.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/sequential/test-debugger-invalid-args.js b/test/sequential/test-debugger-invalid-args.js index 05c27b4a0ee20e..5275d2b1dac9bd 100644 --- a/test/sequential/test-debugger-invalid-args.js +++ b/test/sequential/test-debugger-invalid-args.js @@ -21,7 +21,7 @@ const { createServer } = require('net'); // Launch w/ invalid host:port. { - const cli = startCLI(['localhost:914']); + const cli = startCLI([`localhost:${common.PORT}`]); cli.quit() .then((code) => { assert.match( From 21f77031fb7748b3be413b64ae21b5817d3eefcb Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Wed, 7 Jul 2021 10:51:29 -0700 Subject: [PATCH 099/133] test: move debugger test case to parallel Move test case that does not require a predetermined port to parallel. 
PR-URL: https://github.com/nodejs/node/pull/39300 Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Zeyu Yang --- .../test-debugger-unavailable-port.js | 36 +++++++++++++++++++ test/sequential/test-debugger-invalid-args.js | 28 --------------- 2 files changed, 36 insertions(+), 28 deletions(-) create mode 100644 test/parallel/test-debugger-unavailable-port.js diff --git a/test/parallel/test-debugger-unavailable-port.js b/test/parallel/test-debugger-unavailable-port.js new file mode 100644 index 00000000000000..e2920312ffc21c --- /dev/null +++ b/test/parallel/test-debugger-unavailable-port.js @@ -0,0 +1,36 @@ +'use strict'; +const common = require('../common'); + +common.skipIfInspectorDisabled(); + +const fixtures = require('../common/fixtures'); +const startCLI = require('../common/debugger'); + +const assert = require('assert'); +const { createServer } = require('net'); + +// Launch w/ unavailable port. +(async () => { + const blocker = createServer((socket) => socket.end()); + const port = await new Promise((resolve, reject) => { + blocker.on('error', reject); + blocker.listen(0, '127.0.0.1', () => resolve(blocker.address().port)); + }); + + try { + const script = fixtures.path('debugger', 'three-lines.js'); + const cli = startCLI([`--port=${port}`, script]); + const code = await cli.quit(); + + assert.doesNotMatch( + cli.output, + /report this bug/, + 'Omits message about reporting this as a bug'); + assert.ok( + cli.output.includes(`waiting for 127.0.0.1:${port} to be free`), + 'Tells the user that the port wasn\'t available'); + assert.strictEqual(code, 1); + } finally { + blocker.close(); + } +})().then(common.mustCall()); diff --git a/test/sequential/test-debugger-invalid-args.js b/test/sequential/test-debugger-invalid-args.js index 5275d2b1dac9bd..36f8e588b04a6e 100644 --- a/test/sequential/test-debugger-invalid-args.js +++ b/test/sequential/test-debugger-invalid-args.js @@ -3,11 +3,9 @@ const common = require('../common'); 
common.skipIfInspectorDisabled(); -const fixtures = require('../common/fixtures'); const startCLI = require('../common/debugger'); const assert = require('assert'); -const { createServer } = require('net'); // Launch CLI w/o args. { @@ -31,29 +29,3 @@ const { createServer } = require('net'); assert.strictEqual(code, 1); }); } - -// Launch w/ unavailable port. -(async () => { - const blocker = createServer((socket) => socket.end()); - const port = await new Promise((resolve, reject) => { - blocker.on('error', reject); - blocker.listen(0, '127.0.0.1', () => resolve(blocker.address().port)); - }); - - try { - const script = fixtures.path('debugger', 'three-lines.js'); - const cli = startCLI([`--port=${port}`, script]); - const code = await cli.quit(); - - assert.doesNotMatch( - cli.output, - /report this bug/, - 'Omits message about reporting this as a bug'); - assert.ok( - cli.output.includes(`waiting for 127.0.0.1:${port} to be free`), - 'Tells the user that the port wasn\'t available'); - assert.strictEqual(code, 1); - } finally { - blocker.close(); - } -})().then(common.mustCall()); From 165130a3e0cdebdb46cabb61417d6f36e33302aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Tue, 29 Jun 2021 20:35:31 +0200 Subject: [PATCH 100/133] deps: patch V8 to 9.1.269.38 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Refs: https://github.com/v8/v8/compare/9.1.269.36...9.1.269.38 Fixes: https://github.com/nodejs/node/issues/37553 PR-URL: https://github.com/nodejs/node/pull/39196 Reviewed-By: Richard Lau Reviewed-By: Gireesh Punathil Reviewed-By: Jiawen Geng Reviewed-By: Matteo Collina Reviewed-By: Tobias Nießen Reviewed-By: Colin Ihrig --- deps/v8/include/v8-version.h | 2 +- deps/v8/src/heap/heap.cc | 4 +++ deps/v8/src/heap/heap.h | 2 ++ deps/v8/src/json/json-parser.cc | 5 +++ deps/v8/src/wasm/wasm-js.cc | 59 ++++++++++++++++++++++----------- 5 files changed, 52 insertions(+), 20 deletions(-) diff --git 
a/deps/v8/include/v8-version.h b/deps/v8/include/v8-version.h index 747b33f6da13e6..4bdb66b2bf6847 100644 --- a/deps/v8/include/v8-version.h +++ b/deps/v8/include/v8-version.h @@ -11,7 +11,7 @@ #define V8_MAJOR_VERSION 9 #define V8_MINOR_VERSION 1 #define V8_BUILD_NUMBER 269 -#define V8_PATCH_LEVEL 36 +#define V8_PATCH_LEVEL 38 // Use 1 for candidates and 0 otherwise. // (Boolean macro values are not supported by all preprocessors.) diff --git a/deps/v8/src/heap/heap.cc b/deps/v8/src/heap/heap.cc index 409855bb55d56e..d117d6c50e7207 100644 --- a/deps/v8/src/heap/heap.cc +++ b/deps/v8/src/heap/heap.cc @@ -2129,6 +2129,10 @@ void Heap::CompleteSweepingYoung(GarbageCollector collector) { array_buffer_sweeper()->EnsureFinished(); } +void Heap::EnsureSweepingCompleted() { + mark_compact_collector()->EnsureSweepingCompleted(); +} + void Heap::UpdateCurrentEpoch(GarbageCollector collector) { if (IsYoungGenerationCollector(collector)) { epoch_young_ = next_epoch(); diff --git a/deps/v8/src/heap/heap.h b/deps/v8/src/heap/heap.h index 25b8f5964e054f..429f8864be7adb 100644 --- a/deps/v8/src/heap/heap.h +++ b/deps/v8/src/heap/heap.h @@ -1074,6 +1074,8 @@ class Heap { void CompleteSweepingFull(); void CompleteSweepingYoung(GarbageCollector collector); + void EnsureSweepingCompleted(); + IncrementalMarking* incremental_marking() { return incremental_marking_.get(); } diff --git a/deps/v8/src/json/json-parser.cc b/deps/v8/src/json/json-parser.cc index a85d2af94bfabc..ccea49e89fa372 100644 --- a/deps/v8/src/json/json-parser.cc +++ b/deps/v8/src/json/json-parser.cc @@ -620,6 +620,11 @@ Handle JsonParser::BuildJsonObject( DCHECK_EQ(mutable_double_address, end); } #endif + // Before setting the length of mutable_double_buffer back to zero, we + // must ensure that the sweeper is not running or has already swept the + // object's page. Otherwise the GC can add the contents of + // mutable_double_buffer to the free list. 
+ isolate()->heap()->EnsureSweepingCompleted(); mutable_double_buffer->set_length(0); } } diff --git a/deps/v8/src/wasm/wasm-js.cc b/deps/v8/src/wasm/wasm-js.cc index bc9c5557eb20f1..7f1d8e261fffdf 100644 --- a/deps/v8/src/wasm/wasm-js.cc +++ b/deps/v8/src/wasm/wasm-js.cc @@ -2318,28 +2318,49 @@ void WasmJs::InstallConditionalFeatures(Isolate* isolate, Handle global = handle(context->global_object(), isolate); MaybeHandle maybe_webassembly = JSObject::GetProperty(isolate, global, "WebAssembly"); - Handle webassembly = - Handle::cast(maybe_webassembly.ToHandleChecked()); + Handle webassembly_obj; + if (!maybe_webassembly.ToHandle(&webassembly_obj)) { + // There is not {WebAssembly} object. We just return without adding the + // {Exception} constructor. + return; + } + if (!webassembly_obj->IsJSObject()) { + // The {WebAssembly} object is invalid. As we cannot add the {Exception} + // constructor, we just return. + return; + } + Handle webassembly = Handle::cast(webassembly_obj); // Setup Exception Handle exception_name = v8_str(isolate, "Exception"); - if (!JSObject::HasProperty(webassembly, exception_name).FromMaybe(true)) { - Handle exception_constructor = - CreateFunc(isolate, exception_name, WebAssemblyException, true, - SideEffectType::kHasSideEffect); - exception_constructor->shared().set_length(1); - JSObject::AddProperty(isolate, webassembly, exception_name, - exception_constructor, DONT_ENUM); - // Install the constructor on the context. 
- context->set_wasm_exception_constructor(*exception_constructor); - SetDummyInstanceTemplate(isolate, exception_constructor); - JSFunction::EnsureHasInitialMap(exception_constructor); - Handle exception_proto( - JSObject::cast(exception_constructor->instance_prototype()), isolate); - Handle exception_map = isolate->factory()->NewMap( - i::WASM_EXCEPTION_OBJECT_TYPE, WasmExceptionObject::kHeaderSize); - JSFunction::SetInitialMap(isolate, exception_constructor, exception_map, - exception_proto); + + if (JSObject::HasOwnProperty(webassembly, exception_name).FromMaybe(true)) { + // The {Exception} constructor already exists, there is nothing more to + // do. + return; + } + + bool has_prototype = true; + Handle exception_constructor = + CreateFunc(isolate, exception_name, WebAssemblyException, has_prototype, + SideEffectType::kHasNoSideEffect); + exception_constructor->shared().set_length(1); + auto result = Object::SetProperty( + isolate, webassembly, exception_name, exception_constructor, + StoreOrigin::kNamed, Just(ShouldThrow::kDontThrow)); + if (result.is_null()) { + // Setting the {Exception} constructor failed. We just bail out. + return; } + // Install the constructor on the context. 
+ context->set_wasm_exception_constructor(*exception_constructor); + SetDummyInstanceTemplate(isolate, exception_constructor); + JSFunction::EnsureHasInitialMap(exception_constructor); + Handle exception_proto( + JSObject::cast(exception_constructor->instance_prototype()), isolate); + Handle exception_map = isolate->factory()->NewMap( + i::WASM_EXCEPTION_OBJECT_TYPE, WasmExceptionObject::kHeaderSize); + JSFunction::SetInitialMap(isolate, exception_constructor, exception_map, + exception_proto); } } #undef ASSIGN From ac7184d8c78f837ec905dff9bc2b4f67a2f0de02 Mon Sep 17 00:00:00 2001 From: Tobias Koppers Date: Wed, 7 Jul 2021 09:43:48 +0200 Subject: [PATCH 101/133] http: clean up HttpParser correctly MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Remove reference to kOnMessageBegin from HttpParser to avoid leaking Server instances in FreeList. PR-URL: https://github.com/nodejs/node/pull/39292 Reviewed-By: Robert Nagy Reviewed-By: Luigi Pinca Reviewed-By: Colin Ihrig Reviewed-By: Gerhard Stöbich --- lib/_http_common.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/_http_common.js b/lib/_http_common.js index e3e732a8a180c6..642cdea41f68b1 100644 --- a/lib/_http_common.js +++ b/lib/_http_common.js @@ -46,6 +46,7 @@ let debug = require('internal/util/debuglog').debuglog('http', (fn) => { const kIncomingMessage = Symbol('IncomingMessage'); const kRequestTimeout = Symbol('RequestTimeout'); +const kOnMessageBegin = HTTPParser.kOnMessageBegin | 0; const kOnHeaders = HTTPParser.kOnHeaders | 0; const kOnHeadersComplete = HTTPParser.kOnHeadersComplete | 0; const kOnBody = HTTPParser.kOnBody | 0; @@ -239,6 +240,7 @@ function cleanParser(parser) { parser.incoming = null; parser.outgoing = null; parser.maxHeaderPairs = MAX_HEADER_PAIRS; + parser[kOnMessageBegin] = null; parser[kOnExecute] = null; parser[kOnTimeout] = null; parser._consumed = false; From 0bb980aeaf752807002ebcac031960501f233ba9 Mon Sep 17 00:00:00 2001 From: Rich 
Trott Date: Thu, 8 Jul 2021 21:50:30 -0700 Subject: [PATCH 102/133] test: remove eslint-disable comment from fixture file MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixtures are not linted so eslint-disable comments are unnecessary. PR-URL: https://github.com/nodejs/node/pull/39320 Reviewed-By: Zeyu Yang Reviewed-By: Michaël Zasso Reviewed-By: Colin Ihrig Reviewed-By: James M Snell Reviewed-By: Darshan Sen --- test/fixtures/test-resolution-inspect-brk-resolver.js | 1 - 1 file changed, 1 deletion(-) diff --git a/test/fixtures/test-resolution-inspect-brk-resolver.js b/test/fixtures/test-resolution-inspect-brk-resolver.js index fdfb5ca5b170c2..b5569e69fcf698 100644 --- a/test/fixtures/test-resolution-inspect-brk-resolver.js +++ b/test/fixtures/test-resolution-inspect-brk-resolver.js @@ -1,5 +1,4 @@ 'use strict'; -// eslint-disable-next-line no-unused-vars const common = require('../common'); require.extensions['.ext'] = require.extensions['.js']; From 29673b8ac876558a438e1f7e1fccfea211666d71 Mon Sep 17 00:00:00 2001 From: Voltrex <62040526+VoltrexMaster@users.noreply.github.com> Date: Sat, 29 May 2021 01:08:48 +0430 Subject: [PATCH 103/133] typings: add JSDoc typings for timers MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Added JSDoc typings for the `timers` lib module. PR-URL: https://github.com/nodejs/node/pull/38834 Reviewed-By: Anatoli Papirovski Reviewed-By: Michaël Zasso --- lib/timers.js | 51 +++++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 47 insertions(+), 4 deletions(-) diff --git a/lib/timers.js b/lib/timers.js index 485f577d29f2f0..a4543fea1df6bc 100644 --- a/lib/timers.js +++ b/lib/timers.js @@ -127,10 +127,16 @@ function enroll(item, msecs) { } -/* - * DOM-style timers +/** + * Schedules the execution of a one-time `callback` + * after `after` milliseconds. 
+ * @param {Function} callback + * @param {number} [after] + * @param {any} [arg1] + * @param {any} [arg2] + * @param {any} [arg3] + * @returns {Timeout} */ - function setTimeout(callback, after, arg1, arg2, arg3) { validateCallback(callback); @@ -170,6 +176,11 @@ ObjectDefineProperty(setTimeout, customPromisify, { } }); +/** + * Cancels a timeout. + * @param {Timeout | string | number} timer + * @returns {void} + */ function clearTimeout(timer) { if (timer && timer._onTimeout) { timer._onTimeout = null; @@ -185,6 +196,16 @@ function clearTimeout(timer) { } } +/** + * Schedules repeated execution of `callback` + * every `repeat` milliseconds. + * @param {Function} callback + * @param {number} [repeat] + * @param {any} [arg1] + * @param {any} [arg2] + * @param {any} [arg3] + * @returns {Timeout} + */ function setInterval(callback, repeat, arg1, arg2, arg3) { validateCallback(callback); @@ -215,6 +236,11 @@ function setInterval(callback, repeat, arg1, arg2, arg3) { return timeout; } +/** + * Cancels an interval. + * @param {Timeout | string | number} timer + * @returns {void} + */ function clearInterval(timer) { // clearTimeout and clearInterval can be used to clear timers created from // both setTimeout and setInterval, as specified by HTML Living Standard: @@ -227,6 +253,10 @@ Timeout.prototype.close = function() { return this; }; +/** + * Coerces a `Timeout` to a primitive. + * @returns {number} + */ Timeout.prototype[SymbolToPrimitive] = function() { const id = this[async_id_symbol]; if (!this[kHasPrimitive]) { @@ -236,6 +266,15 @@ Timeout.prototype[SymbolToPrimitive] = function() { return id; }; +/** + * Schedules the immediate execution of `callback` + * after I/O events' callbacks. 
+ * @param {Function} callback + * @param {any} [arg1] + * @param {any} [arg2] + * @param {any} [arg3] + * @returns {Immediate} + */ function setImmediate(callback, arg1, arg2, arg3) { validateCallback(callback); @@ -271,7 +310,11 @@ ObjectDefineProperty(setImmediate, customPromisify, { } }); - +/** + * Cancels an immediate. + * @param {Immediate} immediate + * @returns {void} + */ function clearImmediate(immediate) { if (!immediate || immediate._destroyed) return; From f6a10924717ae7ea75c512db5690c24f8cc0e7ae Mon Sep 17 00:00:00 2001 From: Makoto Kato Date: Wed, 16 Jun 2021 08:12:42 +0900 Subject: [PATCH 104/133] build: allow to build riscv64 using Makefile MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/39048 Reviewed-By: Richard Lau Reviewed-By: Michaël Zasso --- Makefile | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/Makefile b/Makefile index de2691b34146c6..886011b98797e3 100644 --- a/Makefile +++ b/Makefile @@ -847,6 +847,9 @@ else ifeq ($(findstring powerpc,$(shell uname -p)),powerpc) DESTCPU ?= ppc64 else +ifeq ($(findstring riscv64,$(UNAME_M)),riscv64) +DESTCPU ?= riscv64 +else DESTCPU ?= x86 endif endif @@ -857,6 +860,7 @@ endif endif endif endif +endif ifeq ($(DESTCPU),x64) ARCH=x64 else @@ -878,6 +882,9 @@ else ifeq ($(DESTCPU),s390x) ARCH=s390x else +ifeq ($(DESTCPU),riscv64) +ARCH=riscv64 +else ARCH=x86 endif endif @@ -886,6 +893,7 @@ endif endif endif endif +endif # node and v8 use different arch names (e.g. node 'x86' vs v8 'ia32'). # pass the proper v8 arch name to $V8_ARCH based on user-specified $DESTCPU. 
From e26635085a4ba768ad841e3497a75073925bd8bf Mon Sep 17 00:00:00 2001 From: foxxyz Date: Sat, 26 Jun 2021 16:03:02 -0700 Subject: [PATCH 105/133] doc: replace outdated `util.promisify` timer examples with references PR-URL: https://github.com/nodejs/node/pull/39164 Reviewed-By: Antoine du Hamel Reviewed-By: James M Snell --- doc/api/timers.md | 45 ++++++++++----------------------------------- 1 file changed, 10 insertions(+), 35 deletions(-) diff --git a/doc/api/timers.md b/doc/api/timers.md index 89f9ad312e33c4..cfbc8f1ead4b71 100644 --- a/doc/api/timers.md +++ b/doc/api/timers.md @@ -170,25 +170,7 @@ next event loop iteration. If `callback` is not a function, a [`TypeError`][] will be thrown. This method has a custom variant for promises that is available using -[`util.promisify()`][]: - -```js -const util = require('util'); -const setImmediatePromise = util.promisify(setImmediate); - -setImmediatePromise('foobar').then((value) => { - // value === 'foobar' (passing values is optional) - // This is executed after all I/O callbacks. -}); - -// Or with async function -async function timerExample() { - console.log('Before I/O callbacks'); - await setImmediatePromise(); - console.log('After I/O callbacks'); -} -timerExample(); -``` +[`timersPromises.setImmediate()`][]. ### `setInterval(callback[, delay[, ...args]])` Collaborators follow the [Collaborator Guide](./doc/guides/collaborator-guide.md) in @@ -655,7 +667,9 @@ gpg --keyserver pool.sks-keyservers.net --recv-keys B9E2F5981AA6E0CD28160D9FF139 See the section above on [Verifying Binaries](#verifying-binaries) for how to use these keys to verify a downloaded file. -Other keys used to sign some previous releases: +
    + +Other keys used to sign some previous releases * **Chris Dickinson** <christopher.s.dickinson@gmail.com> `9554F04D7259F04124DE6B476D5A82AC7E37093B` @@ -676,6 +690,8 @@ Other keys used to sign some previous releases: * **Timothy J Fontaine** <tjfontaine@gmail.com> `7937DFD2AB06298B2293C3187D33FF9D0246406D` +
    + ## License Node.js is available under the From 8381132f76d4a47542e5221f8c764750afed2639 Mon Sep 17 00:00:00 2001 From: Lu Yahan Date: Fri, 11 Jun 2021 16:09:30 +0800 Subject: [PATCH 110/133] build: add riscv into host_arch_cc PR-URL: https://github.com/nodejs/node/pull/39004 Reviewed-By: Richard Lau Reviewed-By: James M Snell Reviewed-By: Anna Henningsen Reviewed-By: Michael Dawson --- configure.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/configure.py b/configure.py index 4bc790e2f24219..d69c52521d80fb 100755 --- a/configure.py +++ b/configure.py @@ -1066,6 +1066,7 @@ def host_arch_cc(): '__PPC__' : 'ppc64', '__x86_64__' : 'x64', '__s390x__' : 's390x', + '__riscv' : 'riscv', } rtn = 'ia32' # default @@ -1078,6 +1079,12 @@ def host_arch_cc(): if rtn == 'mipsel' and '_LP64' in k: rtn = 'mips64el' + if rtn == 'riscv': + if k['__riscv_xlen'] == '64': + rtn = 'riscv64' + else: + rtn = 'riscv32' + return rtn From adb812c0423704cab9a74529c7a81109a58ebd4c Mon Sep 17 00:00:00 2001 From: nerdthatnoonelikes Date: Sun, 6 Jun 2021 12:29:37 -0500 Subject: [PATCH 111/133] typings: add a few JSDoc typings for the net lib module MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/38953 Reviewed-By: James M Snell Reviewed-By: Michaël Zasso --- lib/net.js | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/lib/net.js b/lib/net.js index 899e73f6ff167d..20601007fab695 100644 --- a/lib/net.js +++ b/lib/net.js @@ -163,6 +163,16 @@ function isPipeName(s) { return typeof s === 'string' && toNumber(s) === false; } +/** + * Creates a new TCP or IPC server + * @param {{ + * allowHalfOpen?: boolean; + * pauseOnConnect?: boolean; + * }} [options] + * @param {Function} [connectionListener] + * @returns {Server} + */ + function createServer(options, connectionListener) { return new Server(options, connectionListener); } @@ -1548,6 +1558,11 @@ function onconnection(err, 
clientHandle) { self.emit('connection', socket); } +/** + * Gets the number of concurrent connections on the server + * @param {Function} cb + * @returns {Server} + */ Server.prototype.getConnections = function(cb) { const self = this; From dfe99d2aac3bb80d80a0c0e05cb9f395b4f82406 Mon Sep 17 00:00:00 2001 From: Robert Nagy Date: Sat, 10 Jul 2021 00:50:56 +0200 Subject: [PATCH 112/133] tls: move legacy code into own file PR-URL: https://github.com/nodejs/node/pull/39333 Reviewed-By: Matteo Collina Reviewed-By: Antoine du Hamel --- lib/_tls_common.js | 5 +- lib/internal/streams/duplexpair.js | 51 ----------- lib/internal/tls/parse-cert-string.js | 35 ++++++++ .../{tls.js => tls/secure-context.js} | 27 ------ lib/internal/tls/secure-pair.js | 86 +++++++++++++++++++ lib/tls.js | 53 ++---------- src/node_native_module.cc | 4 +- test/parallel/test-tls-parse-cert-string.js | 10 +-- 8 files changed, 142 insertions(+), 129 deletions(-) delete mode 100644 lib/internal/streams/duplexpair.js create mode 100644 lib/internal/tls/parse-cert-string.js rename lib/internal/{tls.js => tls/secure-context.js} (92%) create mode 100644 lib/internal/tls/secure-pair.js diff --git a/lib/_tls_common.js b/lib/_tls_common.js index 5ca6d65181d0ae..21b22a42507c5b 100644 --- a/lib/_tls_common.js +++ b/lib/_tls_common.js @@ -52,8 +52,11 @@ const { const { configSecureContext, +} = require('internal/tls/secure-context'); + +const { parseCertString, -} = require('internal/tls'); +} = require('internal/tls/parse-cert-string'); function toV(which, v, def) { if (v == null) v = def; diff --git a/lib/internal/streams/duplexpair.js b/lib/internal/streams/duplexpair.js deleted file mode 100644 index ec92cbe8716df4..00000000000000 --- a/lib/internal/streams/duplexpair.js +++ /dev/null @@ -1,51 +0,0 @@ -'use strict'; - -const { - Symbol, -} = primordials; - -const { Duplex } = require('stream'); - -const kCallback = Symbol('Callback'); -const kOtherSide = Symbol('Other'); - -class DuplexSocket extends Duplex 
{ - constructor() { - super(); - this[kCallback] = null; - this[kOtherSide] = null; - } - - _read() { - const callback = this[kCallback]; - if (callback) { - this[kCallback] = null; - callback(); - } - } - - _write(chunk, encoding, callback) { - if (chunk.length === 0) { - process.nextTick(callback); - } else { - this[kOtherSide].push(chunk); - this[kOtherSide][kCallback] = callback; - } - } - - _final(callback) { - this[kOtherSide].on('end', callback); - this[kOtherSide].push(null); - } -} - -class DuplexPair { - constructor() { - this.socket1 = new DuplexSocket(); - this.socket2 = new DuplexSocket(); - this.socket1[kOtherSide] = this.socket2; - this.socket2[kOtherSide] = this.socket1; - } -} - -module.exports = DuplexPair; diff --git a/lib/internal/tls/parse-cert-string.js b/lib/internal/tls/parse-cert-string.js new file mode 100644 index 00000000000000..a499df886097b4 --- /dev/null +++ b/lib/internal/tls/parse-cert-string.js @@ -0,0 +1,35 @@ +'use strict'; + +const { + ArrayIsArray, + ArrayPrototypeForEach, + ArrayPrototypePush, + StringPrototypeIndexOf, + StringPrototypeSlice, + StringPrototypeSplit, + ObjectCreate, +} = primordials; + +// Example: +// C=US\nST=CA\nL=SF\nO=Joyent\nOU=Node.js\nCN=ca1\nemailAddress=ry@clouds.org +function parseCertString(s) { + const out = ObjectCreate(null); + ArrayPrototypeForEach(StringPrototypeSplit(s, '\n'), (part) => { + const sepIndex = StringPrototypeIndexOf(part, '='); + if (sepIndex > 0) { + const key = StringPrototypeSlice(part, 0, sepIndex); + const value = StringPrototypeSlice(part, sepIndex + 1); + if (key in out) { + if (!ArrayIsArray(out[key])) { + out[key] = [out[key]]; + } + ArrayPrototypePush(out[key], value); + } else { + out[key] = value; + } + } + }); + return out; +} + +exports.parseCertString = parseCertString; diff --git a/lib/internal/tls.js b/lib/internal/tls/secure-context.js similarity index 92% rename from lib/internal/tls.js rename to lib/internal/tls/secure-context.js index 
0a9eea8f3eb026..50a68df092c981 100644 --- a/lib/internal/tls.js +++ b/lib/internal/tls/secure-context.js @@ -5,12 +5,8 @@ const { ArrayPrototypeFilter, ArrayPrototypeForEach, ArrayPrototypeJoin, - ArrayPrototypePush, - StringPrototypeIndexOf, - StringPrototypeSlice, StringPrototypeSplit, StringPrototypeStartsWith, - ObjectCreate, } = primordials; const { @@ -42,28 +38,6 @@ const { }, } = internalBinding('constants'); -// Example: -// C=US\nST=CA\nL=SF\nO=Joyent\nOU=Node.js\nCN=ca1\nemailAddress=ry@clouds.org -function parseCertString(s) { - const out = ObjectCreate(null); - ArrayPrototypeForEach(StringPrototypeSplit(s, '\n'), (part) => { - const sepIndex = StringPrototypeIndexOf(part, '='); - if (sepIndex > 0) { - const key = StringPrototypeSlice(part, 0, sepIndex); - const value = StringPrototypeSlice(part, sepIndex + 1); - if (key in out) { - if (!ArrayIsArray(out[key])) { - out[key] = [out[key]]; - } - ArrayPrototypePush(out[key], value); - } else { - out[key] = value; - } - } - }); - return out; -} - function getDefaultEcdhCurve() { // We do it this way because DEFAULT_ECDH_CURVE can be // changed by users, so we need to grab the current @@ -340,5 +314,4 @@ function configSecureContext(context, options = {}, name = 'options') { module.exports = { configSecureContext, - parseCertString, }; diff --git a/lib/internal/tls/secure-pair.js b/lib/internal/tls/secure-pair.js new file mode 100644 index 00000000000000..b3f0930a3c7118 --- /dev/null +++ b/lib/internal/tls/secure-pair.js @@ -0,0 +1,86 @@ +'use strict'; + +const EventEmitter = require('events'); +const { Duplex } = require('stream'); +const _tls_wrap = require('_tls_wrap'); +const _tls_common = require('_tls_common'); + +const { + Symbol, + ReflectConstruct, +} = primordials; + +const kCallback = Symbol('Callback'); +const kOtherSide = Symbol('Other'); + +class DuplexSocket extends Duplex { + constructor() { + super(); + this[kCallback] = null; + this[kOtherSide] = null; + } + + _read() { + const callback = 
this[kCallback]; + if (callback) { + this[kCallback] = null; + callback(); + } + } + + _write(chunk, encoding, callback) { + if (chunk.length === 0) { + process.nextTick(callback); + } else { + this[kOtherSide].push(chunk); + this[kOtherSide][kCallback] = callback; + } + } + + _final(callback) { + this[kOtherSide].on('end', callback); + this[kOtherSide].push(null); + } +} + +class DuplexPair { + constructor() { + this.socket1 = new DuplexSocket(); + this.socket2 = new DuplexSocket(); + this.socket1[kOtherSide] = this.socket2; + this.socket2[kOtherSide] = this.socket1; + } +} + +class SecurePair extends EventEmitter { + constructor(secureContext = _tls_common.createSecureContext(), + isServer = false, + requestCert = !isServer, + rejectUnauthorized = false, + options = {}) { + super(); + const { socket1, socket2 } = new DuplexPair(); + + this.server = options.server; + this.credentials = secureContext; + + this.encrypted = socket1; + this.cleartext = new _tls_wrap.TLSSocket(socket2, { + secureContext, + isServer, + requestCert, + rejectUnauthorized, + ...options + }); + this.cleartext.once('secure', () => this.emit('secure')); + } + + destroy() { + this.cleartext.destroy(); + this.encrypted.destroy(); + } +} + +exports.createSecurePair = function createSecurePair(...args) { + return ReflectConstruct(SecurePair, args); +}; diff --git a/lib/tls.js b/lib/tls.js index 2282fd33008868..683736460b1ef7 100644 --- a/lib/tls.js +++ b/lib/tls.js @@ -32,7 +32,6 @@ const { ArrayPrototypeSome, ObjectDefineProperty, ObjectFreeze, - ReflectConstruct, RegExpPrototypeTest, StringFromCharCode, StringPrototypeCharCodeAt, @@ -50,19 +49,18 @@ const { } = require('internal/errors').codes; const internalUtil = require('internal/util'); internalUtil.assertCrypto(); -const internalTLS = require('internal/tls'); const { isArrayBufferView } = require('internal/util/types'); const net = require('net'); const { getOptionValue } = require('internal/options'); const { getRootCertificates, 
getSSLCiphers } = internalBinding('crypto'); const { Buffer } = require('buffer'); -const EventEmitter = require('events'); const { URL } = require('internal/url'); -const DuplexPair = require('internal/streams/duplexpair'); const { canonicalizeIP } = internalBinding('cares_wrap'); const _tls_common = require('_tls_common'); const _tls_wrap = require('_tls_wrap'); +const { createSecurePair } = require('internal/tls/secure-pair'); +const { parseCertString } = require('internal/tls/parse-cert-string'); // Allow {CLIENT_RENEG_LIMIT} client-initiated session renegotiations // every {CLIENT_RENEG_WINDOW} seconds. An error event is emitted if more @@ -300,43 +298,6 @@ exports.checkServerIdentity = function checkServerIdentity(hostname, cert) { } }; - -class SecurePair extends EventEmitter { - constructor(secureContext = exports.createSecureContext(), - isServer = false, - requestCert = !isServer, - rejectUnauthorized = false, - options = {}) { - super(); - const { socket1, socket2 } = new DuplexPair(); - - this.server = options.server; - this.credentials = secureContext; - - this.encrypted = socket1; - this.cleartext = new exports.TLSSocket(socket2, { - secureContext, - isServer, - requestCert, - rejectUnauthorized, - ...options - }); - this.cleartext.once('secure', () => this.emit('secure')); - } - - destroy() { - this.cleartext.destroy(); - this.encrypted.destroy(); - } -} - - -exports.parseCertString = internalUtil.deprecate( - internalTLS.parseCertString, - 'tls.parseCertString() is deprecated. ' + - 'Please use querystring.parse() instead.', - 'DEP0076'); - exports.createSecureContext = _tls_common.createSecureContext; exports.SecureContext = _tls_common.SecureContext; exports.TLSSocket = _tls_wrap.TLSSocket; @@ -344,9 +305,13 @@ exports.Server = _tls_wrap.Server; exports.createServer = _tls_wrap.createServer; exports.connect = _tls_wrap.connect; +exports.parseCertString = internalUtil.deprecate( + parseCertString, + 'tls.parseCertString() is deprecated. 
' + + 'Please use querystring.parse() instead.', + 'DEP0076'); + exports.createSecurePair = internalUtil.deprecate( - function createSecurePair(...args) { - return ReflectConstruct(SecurePair, args); - }, + createSecurePair, 'tls.createSecurePair() is deprecated. Please use ' + 'tls.TLSSocket instead.', 'DEP0064'); diff --git a/src/node_native_module.cc b/src/node_native_module.cc index a1aff0a9c74889..2642982330e8cf 100644 --- a/src/node_native_module.cc +++ b/src/node_native_module.cc @@ -99,7 +99,9 @@ void NativeModuleLoader::InitializeModuleCategories() { "tls", "_tls_common", "_tls_wrap", - "internal/tls", + "internal/tls/secure-pair", + "internal/tls/parse-cert-string", + "internal/tls/secure-context", "internal/http2/core", "internal/http2/compat", "internal/policy/manifest", diff --git a/test/parallel/test-tls-parse-cert-string.js b/test/parallel/test-tls-parse-cert-string.js index f5412cad4074c4..c1f32524d578b5 100644 --- a/test/parallel/test-tls-parse-cert-string.js +++ b/test/parallel/test-tls-parse-cert-string.js @@ -11,7 +11,7 @@ const { } = require('../common/hijackstdio'); const assert = require('assert'); // Flags: --expose-internals -const internalTLS = require('internal/tls'); +const { parseCertString } = require('internal/tls/parse-cert-string'); const tls = require('tls'); const noOutput = common.mustNotCall(); @@ -20,7 +20,7 @@ hijackStderr(noOutput); { const singles = 'C=US\nST=CA\nL=SF\nO=Node.js Foundation\nOU=Node.js\n' + 'CN=ca1\nemailAddress=ry@clouds.org'; - const singlesOut = internalTLS.parseCertString(singles); + const singlesOut = parseCertString(singles); assert.deepStrictEqual(singlesOut, { __proto__: null, C: 'US', @@ -36,7 +36,7 @@ hijackStderr(noOutput); { const doubles = 'OU=Domain Control Validated\nOU=PositiveSSL Wildcard\n' + 'CN=*.nodejs.org'; - const doublesOut = internalTLS.parseCertString(doubles); + const doublesOut = parseCertString(doubles); assert.deepStrictEqual(doublesOut, { __proto__: null, OU: [ 'Domain Control 
Validated', 'PositiveSSL Wildcard' ], @@ -46,7 +46,7 @@ hijackStderr(noOutput); { const invalid = 'fhqwhgads'; - const invalidOut = internalTLS.parseCertString(invalid); + const invalidOut = parseCertString(invalid); assert.deepStrictEqual(invalidOut, { __proto__: null }); } @@ -55,7 +55,7 @@ hijackStderr(noOutput); const expected = Object.create(null); expected.__proto__ = 'mostly harmless'; expected.hasOwnProperty = 'not a function'; - assert.deepStrictEqual(internalTLS.parseCertString(input), expected); + assert.deepStrictEqual(parseCertString(input), expected); } restoreStderr(); From 1fc6382942db5f177305836168c6baba45f8c82c Mon Sep 17 00:00:00 2001 From: Robert Nagy Date: Sat, 10 Jul 2021 00:37:09 +0200 Subject: [PATCH 113/133] stream: don't emit prefinish after error or close PR-URL: https://github.com/nodejs/node/pull/39332 Reviewed-By: Matteo Collina Reviewed-By: James M Snell --- lib/internal/streams/writable.js | 10 ++++----- .../test-stream-writable-final-destroy.js | 21 +++++++++++++++++++ 2 files changed, 25 insertions(+), 6 deletions(-) create mode 100644 test/parallel/test-stream-writable-final-destroy.js diff --git a/lib/internal/streams/writable.js b/lib/internal/streams/writable.js index 03f63b16bd04ff..91d1230f1c7d9a 100644 --- a/lib/internal/streams/writable.js +++ b/lib/internal/streams/writable.js @@ -654,7 +654,9 @@ function needFinish(state) { !state.errored && state.buffered.length === 0 && !state.finished && - !state.writing); + !state.writing && + !state.errorEmitted && + !state.closeEmitted); } function callFinal(stream, state) { @@ -685,7 +687,7 @@ function callFinal(stream, state) { then.call( result, function() { - if (state.prefinished) + if (state.prefinished || !needFinish(state)) return; state.prefinish = true; process.nextTick(() => stream.emit('prefinish')); @@ -735,10 +737,6 @@ function finishMaybe(stream, state, sync) { function finish(stream, state) { state.pendingcb--; - // TODO (ronag): Unify with needFinish. 
- if (state.errorEmitted || state.closeEmitted) - return; - state.finished = true; const onfinishCallbacks = state[kOnFinished].splice(0); diff --git a/test/parallel/test-stream-writable-final-destroy.js b/test/parallel/test-stream-writable-final-destroy.js new file mode 100644 index 00000000000000..8d3bf72c89126f --- /dev/null +++ b/test/parallel/test-stream-writable-final-destroy.js @@ -0,0 +1,21 @@ +'use strict'; +const common = require('../common'); + +const { Writable } = require('stream'); + +{ + const w = new Writable({ + write(chunk, encoding, callback) { + callback(null); + }, + final(callback) { + queueMicrotask(callback); + } + }); + w.end(); + w.destroy(); + + w.on('prefinish', common.mustNotCall()); + w.on('finish', common.mustNotCall()); + w.on('close', common.mustCall()); +} From cb32f69e005153fac2dc7cda33a6373435e430c2 Mon Sep 17 00:00:00 2001 From: Robert Nagy Date: Fri, 9 Jul 2021 20:18:06 +0200 Subject: [PATCH 114/133] stream: cleanup async handling Cleanup async stream method handling. 
PR-URL: https://github.com/nodejs/node/pull/39329 Reviewed-By: Matteo Collina Reviewed-By: James M Snell --- lib/internal/streams/destroy.js | 130 +++++------------- lib/internal/streams/readable.js | 2 + lib/internal/streams/writable.js | 41 +++--- .../test-stream-construct-async-error.js | 3 + 4 files changed, 63 insertions(+), 113 deletions(-) diff --git a/lib/internal/streams/destroy.js b/lib/internal/streams/destroy.js index a2892c67a0fcfa..6d50b09f0948b4 100644 --- a/lib/internal/streams/destroy.js +++ b/lib/internal/streams/destroy.js @@ -69,12 +69,16 @@ function destroy(err, cb) { function _destroy(self, err, cb) { let called = false; - const result = self._destroy(err || null, (err) => { - const r = self._readableState; - const w = self._writableState; + function onDestroy(err) { + if (called) { + return; + } called = true; + const r = self._readableState; + const w = self._writableState; + checkError(err, w, r); if (w) { @@ -93,64 +97,24 @@ function _destroy(self, err, cb) { } else { process.nextTick(emitCloseNT, self); } - }); - if (result !== undefined && result !== null) { - try { + } + try { + const result = self._destroy(err || null, onDestroy); + if (result != null) { const then = result.then; if (typeof then === 'function') { then.call( result, function() { - if (called) - return; - - const r = self._readableState; - const w = self._writableState; - - if (w) { - w.closed = true; - } - if (r) { - r.closed = true; - } - - if (typeof cb === 'function') { - process.nextTick(cb); - } - - process.nextTick(emitCloseNT, self); + process.nextTick(onDestroy, null); }, function(err) { - const r = self._readableState; - const w = self._writableState; - err.stack; // eslint-disable-line no-unused-expressions - - called = true; - - if (w && !w.errored) { - w.errored = err; - } - if (r && !r.errored) { - r.errored = err; - } - - if (w) { - w.closed = true; - } - if (r) { - r.closed = true; - } - - if (typeof cb === 'function') { - process.nextTick(cb, err); - } - 
- process.nextTick(emitErrorCloseNT, self, err); + process.nextTick(onDestroy, err); }); } - } catch (err) { - process.nextTick(emitErrorNT, self, err); } + } catch (err) { + onDestroy(err); } } @@ -284,13 +248,19 @@ function construct(stream, cb) { } function constructNT(stream) { - const r = stream._readableState; - const w = stream._writableState; - // With duplex streams we use the writable side for state. - const s = w || r; - let called = false; - const result = stream._construct((err) => { + + function onConstruct(err) { + if (called) { + errorOrDestroy(stream, err ?? new ERR_MULTIPLE_CALLBACK()); + return; + } + called = true; + + const r = stream._readableState; + const w = stream._writableState; + const s = w || r; + if (r) { r.constructed = true; } @@ -298,12 +268,6 @@ function constructNT(stream) { w.constructed = true; } - if (called) { - err = new ERR_MULTIPLE_CALLBACK(); - } else { - called = true; - } - if (s.destroyed) { stream.emit(kDestroy, err); } else if (err) { @@ -311,47 +275,25 @@ function constructNT(stream) { } else { process.nextTick(emitConstructNT, stream); } - }); - if (result !== undefined && result !== null) { - try { + } + + try { + const result = stream._construct(onConstruct); + if (result != null) { const then = result.then; if (typeof then === 'function') { then.call( result, function() { - // If the callback was invoked, do nothing further. 
- if (called) - return; - if (r) { - r.constructed = true; - } - if (w) { - w.constructed = true; - } - if (s.destroyed) { - process.nextTick(() => stream.emit(kDestroy)); - } else { - process.nextTick(emitConstructNT, stream); - } + process.nextTick(onConstruct, null); }, function(err) { - if (r) { - r.constructed = true; - } - if (w) { - w.constructed = true; - } - called = true; - if (s.destroyed) { - process.nextTick(() => stream.emit(kDestroy, err)); - } else { - process.nextTick(errorOrDestroy, stream, err); - } + process.nextTick(onConstruct, err); }); } - } catch (err) { - process.nextTick(emitErrorNT, stream, err); } + } catch (err) { + onConstruct(err); } } diff --git a/lib/internal/streams/readable.js b/lib/internal/streams/readable.js index 7f6876599cc7fc..d2d4f19ed3fa5c 100644 --- a/lib/internal/streams/readable.js +++ b/lib/internal/streams/readable.js @@ -479,8 +479,10 @@ Readable.prototype.read = function(n) { // If the length is currently zero, then we *need* a readable event. if (state.length === 0) state.needReadable = true; + // Call internal read method this._read(state.highWaterMark); + state.sync = false; // If _read pushed data synchronously, then `reading` will be false, // and we need to re-evaluate how much data we can return to the user. diff --git a/lib/internal/streams/writable.js b/lib/internal/streams/writable.js index 91d1230f1c7d9a..f41cc183f0939a 100644 --- a/lib/internal/streams/writable.js +++ b/lib/internal/streams/writable.js @@ -660,9 +660,15 @@ function needFinish(state) { } function callFinal(stream, state) { - state.sync = true; - state.pendingcb++; - const result = stream._final((err) => { + let called = false; + + function onFinish(err) { + if (called) { + errorOrDestroy(stream, err ?? 
ERR_MULTIPLE_CALLBACK()); + return; + } + called = true; + state.pendingcb--; if (err) { const onfinishCallbacks = state[kOnFinished].splice(0); @@ -679,33 +685,30 @@ function callFinal(stream, state) { state.pendingcb++; process.nextTick(finish, stream, state); } - }); - if (result !== undefined && result !== null) { - try { + } + + state.sync = true; + state.pendingcb++; + + try { + const result = stream._final(onFinish); + if (result != null) { const then = result.then; if (typeof then === 'function') { then.call( result, function() { - if (state.prefinished || !needFinish(state)) - return; - state.prefinish = true; - process.nextTick(() => stream.emit('prefinish')); - state.pendingcb++; - process.nextTick(finish, stream, state); + process.nextTick(onFinish, null); }, function(err) { - const onfinishCallbacks = state[kOnFinished].splice(0); - for (let i = 0; i < onfinishCallbacks.length; i++) { - process.nextTick(onfinishCallbacks[i], err); - } - process.nextTick(errorOrDestroy, stream, err, state.sync); + process.nextTick(onFinish, err); }); } - } catch (err) { - process.nextTick(errorOrDestroy, stream, err, state.sync); } + } catch (err) { + onFinish(stream, state, err); } + state.sync = false; } diff --git a/test/parallel/test-stream-construct-async-error.js b/test/parallel/test-stream-construct-async-error.js index 34e450c853a850..8101ec93fc0d74 100644 --- a/test/parallel/test-stream-construct-async-error.js +++ b/test/parallel/test-stream-construct-async-error.js @@ -98,6 +98,9 @@ const assert = require('assert'); const foo = new Foo(); foo.write('test', common.mustCall()); + foo.on('error', common.expectsError({ + code: 'ERR_MULTIPLE_CALLBACK' + })); } { From cfd96aa8f91dddb1293aa94cb00912a19e5b9145 Mon Sep 17 00:00:00 2001 From: Robert Nagy Date: Sun, 11 Jul 2021 19:21:53 +0200 Subject: [PATCH 115/133] meta: fix tls code owners PR-URL: https://github.com/nodejs/node/pull/39355 Reviewed-By: Luigi Pinca Reviewed-By: Rich Trott --- .github/CODEOWNERS | 2 +- 
1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 77333a671a5582..7fdbdc7ef86e95 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -54,7 +54,7 @@ # tls/crypto /lib/internal/crypto/* @nodejs/crypto -/lib/internal/tls.js @nodejs/crypto @nodejs/net +/lib/internal/tls/* @nodejs/crypto @nodejs/net /lib/crypto.js @nodejs/crypto /lib/tls.js @nodejs/crypto @nodejs/net /src/node_crypto* @nodejs/crypto From 10681828ac9fc470986d66e172c6df399c75a8d1 Mon Sep 17 00:00:00 2001 From: Richard Lau Date: Thu, 8 Jul 2021 22:27:41 -0400 Subject: [PATCH 116/133] build: update gcovr for gcc 8 compatibility Update the version of `gcovr` used for C++ coverage from 3.4 to 4.2 for compatibility with gcc/g++ 8. PR-URL: https://github.com/nodejs/node/pull/39326 Refs: https://github.com/nodejs/node/issues/39303 Refs: https://github.com/gcovr/gcovr/pull/228 Refs: https://github.com/nodejs/build/pull/2705 Reviewed-By: Michael Dawson Reviewed-By: Rich Trott Reviewed-By: James M Snell --- Makefile | 20 ++++++-------------- 1 file changed, 6 insertions(+), 14 deletions(-) diff --git a/Makefile b/Makefile index bf1890413ad6c2..e4d13a818a66b4 100644 --- a/Makefile +++ b/Makefile @@ -204,7 +204,7 @@ check: test # in place coverage-clean: $(RM) -r node_modules - $(RM) -r gcovr build + $(RM) -r gcovr $(RM) -r coverage/tmp $(FIND) out/$(BUILDTYPE)/obj.target \( -name "*.gcda" -o -name "*.gcno" \) \ -type f -exec $(RM) {} \; @@ -220,13 +220,7 @@ coverage: coverage-test ## Run the tests and generate a coverage report. .PHONY: coverage-build coverage-build: all -$(MAKE) coverage-build-js - if [ ! -d gcovr ]; then git clone -b 3.4 --depth=1 \ - --single-branch https://github.com/gcovr/gcovr.git; fi - if [ ! -d build ]; then git clone --depth=1 \ - --single-branch https://github.com/nodejs/build.git; fi - if [ ! 
-f gcovr/scripts/gcovr.orig ]; then \ - (cd gcovr && patch -N -p1 < \ - "$(CURDIR)/build/jenkins/scripts/coverage/gcovr-patches-3.4.diff"); fi + if [ ! -d gcovr ]; then $(PYTHON) -m pip install -t gcovr gcovr==4.2; fi $(MAKE) .PHONY: coverage-build-js @@ -238,16 +232,14 @@ coverage-build-js: .PHONY: coverage-test coverage-test: coverage-build - $(RM) out/$(BUILDTYPE)/obj.target/node/src/*.gcda - $(RM) out/$(BUILDTYPE)/obj.target/node/src/*/*.gcda - $(RM) out/$(BUILDTYPE)/obj.target/node_lib/src/*.gcda - $(RM) out/$(BUILDTYPE)/obj.target/node_lib/src/*/*.gcda + $(FIND) out/$(BUILDTYPE)/obj.target -name "*.gcda" -type f -exec $(RM) {} \; -NODE_V8_COVERAGE=coverage/tmp \ TEST_CI_ARGS="$(TEST_CI_ARGS) --type=coverage" $(MAKE) $(COVTESTS) $(MAKE) coverage-report-js - -(cd out && "../gcovr/scripts/gcovr" \ + -(cd out && PYTHONPATH=../gcovr $(PYTHON) -m gcovr \ --gcov-exclude='.*\b(deps|usr|out|cctest|embedding)\b' -v \ - -r Release/obj.target --html --html-detail -o ../coverage/cxxcoverage.html \ + -r ../src/ --object-directory Release/obj.target \ + --html --html-details -o ../coverage/cxxcoverage.html \ --gcov-executable="$(GCOV)") @printf "Javascript coverage %%: " @grep -B1 Lines coverage/index.html | head -n1 \ From f338fddbb0cbe9f17af111bba2a8b67586dd1002 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Fri, 9 Jul 2021 19:27:29 -0700 Subject: [PATCH 117/133] tools: add GitHub Action to run find-inactive-collaborators.mjs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add a GitHub Action for find-inactive-collaborators.mjs that will run it and list collaborators who have been inactive for more than a year. It will run when manually triggered by a collaborator and on a schedule of once a month. 
PR-URL: https://github.com/nodejs/node/pull/39335 Reviewed-By: Tobias Nießen Reviewed-By: James M Snell --- .../workflows/find-inactive-collaborators.yml | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 .github/workflows/find-inactive-collaborators.yml diff --git a/.github/workflows/find-inactive-collaborators.yml b/.github/workflows/find-inactive-collaborators.yml new file mode 100644 index 00000000000000..8e459b5b8b2942 --- /dev/null +++ b/.github/workflows/find-inactive-collaborators.yml @@ -0,0 +1,17 @@ +name: Find inactive collaborators + +on: + schedule: + # Run on the 15th day of the month at 4:05 AM UTC. + - cron: '5 4 15 * *' + + workflow_dispatch: + +jobs: + find: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - run: tools/find-inactive-collaborators.mjs '1 year ago' From 3d351b29c1be6212e9bda81de9201b5b5b538642 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Sat, 10 Jul 2021 08:19:50 +0200 Subject: [PATCH 118/133] deps: V8: cherry-pick cb4faa902e9f Original commit message: Reland "[liftoff][arm64] Use 64 bit offset reg in mem op" This is a reland of f645d0b857bc669271adcbe95cf25e1554347dd4 The issue was that converting an i64 to an i32 didn't clear the upper bits on arm64. This was not necessary before because we did the zero extension as part of the load operand, but this is required now that we use the full register. Original change's description: > [liftoff][arm64] Use 64 bit offset reg in mem op > > Accessing the Wasm memory with a 64 bit offset was truncated to 32 bit, > which is fine if we check bounds first, but not if we rely on the > trap handler to catch the OOB. 
> > R=clemensb@chromium.org > > Bug: v8:11587 > Change-Id: I82a3a2906e55d9d640c30e770a5c93532e3a442c > Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2808942 > Reviewed-by: Clemens Backes > Commit-Queue: Thibaud Michaud > Cr-Commit-Position: refs/heads/master@{#73829} Bug: v8:11587 Change-Id: Ibc182475745c6f697a0ba6d75c260b74ddf8fe52 Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2810846 Reviewed-by: Clemens Backes Commit-Queue: Thibaud Michaud Cr-Commit-Position: refs/heads/master@{#73853} Refs: https://github.com/v8/v8/commit/cb4faa902e9f9fe848b46fbe8047f70ad4a54971 PR-URL: https://github.com/nodejs/node/pull/39337 Reviewed-By: Matteo Collina Reviewed-By: James M Snell --- deps/v8/src/wasm/baseline/arm64/liftoff-assembler-arm64.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/deps/v8/src/wasm/baseline/arm64/liftoff-assembler-arm64.h b/deps/v8/src/wasm/baseline/arm64/liftoff-assembler-arm64.h index 39ef8528e5267a..af794f9a9d5e66 100644 --- a/deps/v8/src/wasm/baseline/arm64/liftoff-assembler-arm64.h +++ b/deps/v8/src/wasm/baseline/arm64/liftoff-assembler-arm64.h @@ -128,7 +128,7 @@ inline MemOperand GetMemOp(LiftoffAssembler* assm, UseScratchRegisterScope* temps, Register addr, Register offset, T offset_imm) { if (offset.is_valid()) { - if (offset_imm == 0) return MemOperand(addr.X(), offset.W(), UXTW); + if (offset_imm == 0) return MemOperand(addr.X(), offset.X()); Register tmp = temps->AcquireX(); DCHECK_GE(kMaxUInt32, offset_imm); assm->Add(tmp, offset.X(), offset_imm); @@ -1333,7 +1333,7 @@ bool LiftoffAssembler::emit_type_conversion(WasmOpcode opcode, LiftoffRegister src, Label* trap) { switch (opcode) { case kExprI32ConvertI64: - if (src != dst) Mov(dst.gp().W(), src.gp().W()); + Mov(dst.gp().W(), src.gp().W()); return true; case kExprI32SConvertF32: Fcvtzs(dst.gp().W(), src.fp().S()); // f32 -> i32 round to zero. 
From cf49ebb052d50fbd3d1d3b64f0e0ce906644be3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Sat, 10 Jul 2021 08:23:03 +0200 Subject: [PATCH 119/133] deps: V8: cherry-pick 53784bdb8f01 Original commit message: [liftoff] Handle constant memory indexes specially This adds detection for constant memory indexes which can statically be proven to be in-bounds (because the effective offset is within the minimum memory size). In these cases, we can skip the bounds check and the out-of-line code for the trap-handler. This often saves 1-2% of code size. R=ahaas@chromium.org Bug: v8:11802 Change-Id: I0ee094e6f1f5d132af1d6a8a7c539a4af6c3cb5e Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2919827 Commit-Queue: Clemens Backes Reviewed-by: Andreas Haas Cr-Commit-Position: refs/heads/master@{#74825} Refs: https://github.com/v8/v8/commit/53784bdb8f01a6ff76fc3acd3aec4d605cb3bfcc PR-URL: https://github.com/nodejs/node/pull/39337 Reviewed-By: Matteo Collina Reviewed-By: James M Snell --- common.gypi | 2 +- deps/v8/src/wasm/baseline/liftoff-compiler.cc | 140 ++++++++++++------ 2 files changed, 99 insertions(+), 43 deletions(-) diff --git a/common.gypi b/common.gypi index aa42c69f96391b..a7c109075f645c 100644 --- a/common.gypi +++ b/common.gypi @@ -36,7 +36,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. 
- 'v8_embedder_string': '-node.14', + 'v8_embedder_string': '-node.15', ##### V8 defaults for Node.js ##### diff --git a/deps/v8/src/wasm/baseline/liftoff-compiler.cc b/deps/v8/src/wasm/baseline/liftoff-compiler.cc index a26df17225204f..c4b6cbdac28d21 100644 --- a/deps/v8/src/wasm/baseline/liftoff-compiler.cc +++ b/deps/v8/src/wasm/baseline/liftoff-compiler.cc @@ -2767,33 +2767,73 @@ class LiftoffCompiler { return index; } + bool IndexStaticallyInBounds(const LiftoffAssembler::VarState& index_slot, + int access_size, uintptr_t* offset) { + if (!index_slot.is_const()) return false; + + // Potentially zero extend index (which is a 32-bit constant). + const uintptr_t index = static_cast(index_slot.i32_const()); + const uintptr_t effective_offset = index + *offset; + + if (effective_offset < index // overflow + || !base::IsInBounds(effective_offset, access_size, + env_->min_memory_size)) { + return false; + } + + *offset = effective_offset; + return true; + } + void LoadMem(FullDecoder* decoder, LoadType type, const MemoryAccessImmediate& imm, const Value& index_val, Value* result) { ValueKind kind = type.value_type().kind(); + RegClass rc = reg_class_for(kind); if (!CheckSupportedType(decoder, kind, "load")) return; - LiftoffRegister full_index = __ PopToRegister(); - Register index = BoundsCheckMem(decoder, type.size(), imm.offset, - full_index, {}, kDontForceCheck); - if (index == no_reg) return; uintptr_t offset = imm.offset; - LiftoffRegList pinned = LiftoffRegList::ForRegs(index); - index = AddMemoryMasking(index, &offset, &pinned); - DEBUG_CODE_COMMENT("load from memory"); - Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp(); - LOAD_INSTANCE_FIELD(addr, MemoryStart, kSystemPointerSize, pinned); - RegClass rc = reg_class_for(kind); - LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned)); - uint32_t protected_load_pc = 0; - __ Load(value, addr, index, offset, type, pinned, &protected_load_pc, true); - if (env_->use_trap_handler) { 
- AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds, - protected_load_pc); + Register index = no_reg; + + // Only look at the slot, do not pop it yet (will happen in PopToRegister + // below, if this is not a statically-in-bounds index). + auto& index_slot = __ cache_state()->stack_state.back(); + if (IndexStaticallyInBounds(index_slot, type.size(), &offset)) { + __ cache_state()->stack_state.pop_back(); + DEBUG_CODE_COMMENT("load from memory (constant offset)"); + LiftoffRegList pinned; + Register mem = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp(); + LOAD_INSTANCE_FIELD(mem, MemoryStart, kSystemPointerSize, pinned); + LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned)); + __ Load(value, mem, no_reg, offset, type, pinned, nullptr, true); + __ PushRegister(kind, value); + } else { + LiftoffRegister full_index = __ PopToRegister(); + index = BoundsCheckMem(decoder, type.size(), offset, full_index, {}, + kDontForceCheck); + if (index == no_reg) return; + + DEBUG_CODE_COMMENT("load from memory"); + LiftoffRegList pinned = LiftoffRegList::ForRegs(index); + index = AddMemoryMasking(index, &offset, &pinned); + + // Load the memory start address only now to reduce register pressure + // (important on ia32). 
+ Register mem = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp(); + LOAD_INSTANCE_FIELD(mem, MemoryStart, kSystemPointerSize, pinned); + LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned)); + + uint32_t protected_load_pc = 0; + __ Load(value, mem, index, offset, type, pinned, &protected_load_pc, + true); + if (env_->use_trap_handler) { + AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds, + protected_load_pc); + } + __ PushRegister(kind, value); } - __ PushRegister(kind, value); - if (FLAG_trace_wasm_memory) { + if (V8_UNLIKELY(FLAG_trace_wasm_memory)) { TraceMemoryOperation(false, type.mem_type().representation(), index, offset, decoder->position()); } @@ -2836,7 +2876,7 @@ class LiftoffCompiler { } __ PushRegister(kS128, value); - if (FLAG_trace_wasm_memory) { + if (V8_UNLIKELY(FLAG_trace_wasm_memory)) { // Again load extend is different. MachineRepresentation mem_rep = transform == LoadTransformationKind::kExtend @@ -2878,7 +2918,7 @@ class LiftoffCompiler { __ PushRegister(kS128, result); - if (FLAG_trace_wasm_memory) { + if (V8_UNLIKELY(FLAG_trace_wasm_memory)) { TraceMemoryOperation(false, type.mem_type().representation(), index, offset, decoder->position()); } @@ -2889,29 +2929,45 @@ class LiftoffCompiler { const Value& index_val, const Value& value_val) { ValueKind kind = type.value_type().kind(); if (!CheckSupportedType(decoder, kind, "store")) return; + LiftoffRegList pinned; LiftoffRegister value = pinned.set(__ PopToRegister()); - LiftoffRegister full_index = __ PopToRegister(pinned); - Register index = BoundsCheckMem(decoder, type.size(), imm.offset, - full_index, pinned, kDontForceCheck); - if (index == no_reg) return; uintptr_t offset = imm.offset; - pinned.set(index); - index = AddMemoryMasking(index, &offset, &pinned); - DEBUG_CODE_COMMENT("store to memory"); - Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp(); - LOAD_INSTANCE_FIELD(addr, MemoryStart, kSystemPointerSize, pinned); - uint32_t 
protected_store_pc = 0; - LiftoffRegList outer_pinned; - if (FLAG_trace_wasm_memory) outer_pinned.set(index); - __ Store(addr, index, offset, value, type, outer_pinned, - &protected_store_pc, true); - if (env_->use_trap_handler) { - AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds, - protected_store_pc); + Register index = no_reg; + + auto& index_slot = __ cache_state()->stack_state.back(); + if (IndexStaticallyInBounds(index_slot, type.size(), &offset)) { + __ cache_state()->stack_state.pop_back(); + DEBUG_CODE_COMMENT("store to memory (constant offset)"); + Register mem = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp(); + LOAD_INSTANCE_FIELD(mem, MemoryStart, kSystemPointerSize, pinned); + __ Store(mem, no_reg, offset, value, type, pinned, nullptr, true); + } else { + LiftoffRegister full_index = __ PopToRegister(pinned); + index = BoundsCheckMem(decoder, type.size(), imm.offset, full_index, + pinned, kDontForceCheck); + if (index == no_reg) return; + + pinned.set(index); + index = AddMemoryMasking(index, &offset, &pinned); + DEBUG_CODE_COMMENT("store to memory"); + uint32_t protected_store_pc = 0; + // Load the memory start address only now to reduce register pressure + // (important on ia32). 
+ Register mem = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp(); + LOAD_INSTANCE_FIELD(mem, MemoryStart, kSystemPointerSize, pinned); + LiftoffRegList outer_pinned; + if (V8_UNLIKELY(FLAG_trace_wasm_memory)) outer_pinned.set(index); + __ Store(mem, index, offset, value, type, outer_pinned, + &protected_store_pc, true); + if (env_->use_trap_handler) { + AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds, + protected_store_pc); + } } - if (FLAG_trace_wasm_memory) { + + if (V8_UNLIKELY(FLAG_trace_wasm_memory)) { TraceMemoryOperation(true, type.mem_rep(), index, offset, decoder->position()); } @@ -2940,7 +2996,7 @@ class LiftoffCompiler { AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds, protected_store_pc); } - if (FLAG_trace_wasm_memory) { + if (V8_UNLIKELY(FLAG_trace_wasm_memory)) { TraceMemoryOperation(true, type.mem_rep(), index, offset, decoder->position()); } @@ -4156,9 +4212,9 @@ class LiftoffCompiler { Register addr = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp(); LOAD_INSTANCE_FIELD(addr, MemoryStart, kSystemPointerSize, pinned); LiftoffRegList outer_pinned; - if (FLAG_trace_wasm_memory) outer_pinned.set(index); + if (V8_UNLIKELY(FLAG_trace_wasm_memory)) outer_pinned.set(index); __ AtomicStore(addr, index, offset, value, type, outer_pinned); - if (FLAG_trace_wasm_memory) { + if (V8_UNLIKELY(FLAG_trace_wasm_memory)) { TraceMemoryOperation(true, type.mem_rep(), index, offset, decoder->position()); } @@ -4184,7 +4240,7 @@ class LiftoffCompiler { __ AtomicLoad(value, addr, index, offset, type, pinned); __ PushRegister(kind, value); - if (FLAG_trace_wasm_memory) { + if (V8_UNLIKELY(FLAG_trace_wasm_memory)) { TraceMemoryOperation(false, type.mem_type().representation(), index, offset, decoder->position()); } From 5c5a93e53348fdcd4943b8883f5a6fe7767c54d0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Sat, 10 Jul 2021 08:23:20 +0200 Subject: [PATCH 120/133] deps: V8: cherry-pick 2b77ca200c56 Original 
commit message: [wasm][liftoff] Always zero-extend 32 bit offsets The upper 32 bits of the 64 bit offset register are not guaranteed to be cleared, so a zero-extension is needed. We already do the zero-extension in the case of explicit bounds checking, but this should also be done if the trap handler is enabled. R=clemensb@chromium.org CC=jkummerow@chromium.org Bug: v8:11809 Change-Id: I21e2535c701041d11fa06c176fa683d82db0a3f1 Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2917612 Commit-Queue: Thibaud Michaud Reviewed-by: Clemens Backes Cr-Commit-Position: refs/heads/master@{#74881} Refs: https://github.com/v8/v8/commit/2b77ca200c56667c68895e49c96c10ff77834f09 PR-URL: https://github.com/nodejs/node/pull/39337 Reviewed-By: Matteo Collina Reviewed-By: James M Snell --- common.gypi | 2 +- .../wasm/baseline/arm/liftoff-assembler-arm.h | 3 +- .../baseline/arm64/liftoff-assembler-arm64.h | 15 +++-- .../baseline/ia32/liftoff-assembler-ia32.h | 3 +- deps/v8/src/wasm/baseline/liftoff-assembler.h | 2 +- deps/v8/src/wasm/baseline/liftoff-compiler.cc | 8 ++- .../wasm/baseline/x64/liftoff-assembler-x64.h | 6 +- .../mjsunit/regress/wasm/regress-11809.js | 58 +++++++++++++++++++ 8 files changed, 84 insertions(+), 13 deletions(-) create mode 100644 deps/v8/test/mjsunit/regress/wasm/regress-11809.js diff --git a/common.gypi b/common.gypi index a7c109075f645c..ee91fb1df6d913 100644 --- a/common.gypi +++ b/common.gypi @@ -36,7 +36,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. 
- 'v8_embedder_string': '-node.15', + 'v8_embedder_string': '-node.16', ##### V8 defaults for Node.js ##### diff --git a/deps/v8/src/wasm/baseline/arm/liftoff-assembler-arm.h b/deps/v8/src/wasm/baseline/arm/liftoff-assembler-arm.h index 62917ab0a3456a..7acdf635c90ef0 100644 --- a/deps/v8/src/wasm/baseline/arm/liftoff-assembler-arm.h +++ b/deps/v8/src/wasm/baseline/arm/liftoff-assembler-arm.h @@ -766,7 +766,8 @@ void LiftoffAssembler::StoreTaggedPointer(Register dst_addr, void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr, Register offset_reg, uint32_t offset_imm, LoadType type, LiftoffRegList pinned, - uint32_t* protected_load_pc, bool is_load_mem) { + uint32_t* protected_load_pc, bool is_load_mem, + bool i64_offset) { // Offsets >=2GB are statically OOB on 32-bit systems. DCHECK_LE(offset_imm, std::numeric_limits::max()); liftoff::LoadInternal(this, dst, src_addr, offset_reg, diff --git a/deps/v8/src/wasm/baseline/arm64/liftoff-assembler-arm64.h b/deps/v8/src/wasm/baseline/arm64/liftoff-assembler-arm64.h index af794f9a9d5e66..549bbe7f10a9b8 100644 --- a/deps/v8/src/wasm/baseline/arm64/liftoff-assembler-arm64.h +++ b/deps/v8/src/wasm/baseline/arm64/liftoff-assembler-arm64.h @@ -126,9 +126,13 @@ inline CPURegister AcquireByType(UseScratchRegisterScope* temps, template inline MemOperand GetMemOp(LiftoffAssembler* assm, UseScratchRegisterScope* temps, Register addr, - Register offset, T offset_imm) { + Register offset, T offset_imm, + bool i64_offset = false) { if (offset.is_valid()) { - if (offset_imm == 0) return MemOperand(addr.X(), offset.X()); + if (offset_imm == 0) { + return i64_offset ? 
MemOperand(addr.X(), offset.X()) + : MemOperand(addr.X(), offset.W(), UXTW); + } Register tmp = temps->AcquireX(); DCHECK_GE(kMaxUInt32, offset_imm); assm->Add(tmp, offset.X(), offset_imm); @@ -490,10 +494,11 @@ void LiftoffAssembler::StoreTaggedPointer(Register dst_addr, void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, LiftoffRegList pinned, - uint32_t* protected_load_pc, bool is_load_mem) { + uint32_t* protected_load_pc, bool is_load_mem, + bool i64_offset) { UseScratchRegisterScope temps(this); - MemOperand src_op = - liftoff::GetMemOp(this, &temps, src_addr, offset_reg, offset_imm); + MemOperand src_op = liftoff::GetMemOp(this, &temps, src_addr, offset_reg, + offset_imm, i64_offset); if (protected_load_pc) *protected_load_pc = pc_offset(); switch (type.value()) { case LoadType::kI32Load8U: diff --git a/deps/v8/src/wasm/baseline/ia32/liftoff-assembler-ia32.h b/deps/v8/src/wasm/baseline/ia32/liftoff-assembler-ia32.h index 83b00d4a2ad7db..e597467c7342c7 100644 --- a/deps/v8/src/wasm/baseline/ia32/liftoff-assembler-ia32.h +++ b/deps/v8/src/wasm/baseline/ia32/liftoff-assembler-ia32.h @@ -388,7 +388,8 @@ void LiftoffAssembler::StoreTaggedPointer(Register dst_addr, void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr, Register offset_reg, uint32_t offset_imm, LoadType type, LiftoffRegList pinned, - uint32_t* protected_load_pc, bool is_load_mem) { + uint32_t* protected_load_pc, bool is_load_mem, + bool i64_offset) { // Offsets >=2GB are statically OOB on 32-bit systems. 
DCHECK_LE(offset_imm, std::numeric_limits::max()); DCHECK_EQ(type.value_type() == kWasmI64, dst.is_gp_pair()); diff --git a/deps/v8/src/wasm/baseline/liftoff-assembler.h b/deps/v8/src/wasm/baseline/liftoff-assembler.h index 3090bc81659779..dbff396f82bc63 100644 --- a/deps/v8/src/wasm/baseline/liftoff-assembler.h +++ b/deps/v8/src/wasm/baseline/liftoff-assembler.h @@ -669,7 +669,7 @@ class LiftoffAssembler : public TurboAssembler { inline void Load(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, LiftoffRegList pinned, uint32_t* protected_load_pc = nullptr, - bool is_load_mem = false); + bool is_load_mem = false, bool i64_offset = false); inline void Store(Register dst_addr, Register offset_reg, uintptr_t offset_imm, LiftoffRegister src, StoreType type, LiftoffRegList pinned, diff --git a/deps/v8/src/wasm/baseline/liftoff-compiler.cc b/deps/v8/src/wasm/baseline/liftoff-compiler.cc index c4b6cbdac28d21..84d217b2e42163 100644 --- a/deps/v8/src/wasm/baseline/liftoff-compiler.cc +++ b/deps/v8/src/wasm/baseline/liftoff-compiler.cc @@ -2798,6 +2798,7 @@ class LiftoffCompiler { // Only look at the slot, do not pop it yet (will happen in PopToRegister // below, if this is not a statically-in-bounds index). 
auto& index_slot = __ cache_state()->stack_state.back(); + bool i64_offset = index_val.type == kWasmI64; if (IndexStaticallyInBounds(index_slot, type.size(), &offset)) { __ cache_state()->stack_state.pop_back(); DEBUG_CODE_COMMENT("load from memory (constant offset)"); @@ -2805,7 +2806,8 @@ class LiftoffCompiler { Register mem = pinned.set(__ GetUnusedRegister(kGpReg, pinned)).gp(); LOAD_INSTANCE_FIELD(mem, MemoryStart, kSystemPointerSize, pinned); LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned)); - __ Load(value, mem, no_reg, offset, type, pinned, nullptr, true); + __ Load(value, mem, no_reg, offset, type, pinned, nullptr, true, + i64_offset); __ PushRegister(kind, value); } else { LiftoffRegister full_index = __ PopToRegister(); @@ -2824,8 +2826,8 @@ class LiftoffCompiler { LiftoffRegister value = pinned.set(__ GetUnusedRegister(rc, pinned)); uint32_t protected_load_pc = 0; - __ Load(value, mem, index, offset, type, pinned, &protected_load_pc, - true); + __ Load(value, mem, index, offset, type, pinned, &protected_load_pc, true, + i64_offset); if (env_->use_trap_handler) { AddOutOfLineTrap(decoder, WasmCode::kThrowWasmTrapMemOutOfBounds, protected_load_pc); diff --git a/deps/v8/src/wasm/baseline/x64/liftoff-assembler-x64.h b/deps/v8/src/wasm/baseline/x64/liftoff-assembler-x64.h index e8a57bafca1f35..68619a9f1b3e49 100644 --- a/deps/v8/src/wasm/baseline/x64/liftoff-assembler-x64.h +++ b/deps/v8/src/wasm/baseline/x64/liftoff-assembler-x64.h @@ -389,7 +389,11 @@ void LiftoffAssembler::AtomicLoad(LiftoffRegister dst, Register src_addr, void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, LiftoffRegList pinned, - uint32_t* protected_load_pc, bool is_load_mem) { + uint32_t* protected_load_pc, bool is_load_mem, + bool i64_offset) { + if (offset_reg != no_reg && !i64_offset) { + AssertZeroExtended(offset_reg); + } Operand src_op = liftoff::GetMemOp(this, src_addr, offset_reg, 
offset_imm); if (protected_load_pc) *protected_load_pc = pc_offset(); switch (type.value()) { diff --git a/deps/v8/test/mjsunit/regress/wasm/regress-11809.js b/deps/v8/test/mjsunit/regress/wasm/regress-11809.js new file mode 100644 index 00000000000000..890e26c609e151 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/wasm/regress-11809.js @@ -0,0 +1,58 @@ +// Copyright 2021 the V8 project authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +// +// Flags: --enable-testing-opcode-in-wasm --nowasm-tier-up --wasm-tier-mask-for-testing=2 + +load("test/mjsunit/wasm/wasm-module-builder.js"); + +var instance = (function () { + var builder = new WasmModuleBuilder(); + builder.addMemory(1, 1, false /* exported */); + + var sig_index = builder.addType(makeSig( + [kWasmI32, kWasmI32, kWasmI32, kWasmI32, kWasmI32, kWasmI32, kWasmI32, + kWasmI32], + [kWasmI32])); + var sig_three = builder.addType(makeSig( + [kWasmI64, kWasmI64, kWasmI64, kWasmI64, kWasmI64, kWasmI64, kWasmI64, + kWasmI64], + [])); + + var zero = builder.addFunction("zero", kSig_i_i); + var one = builder.addFunction("one", sig_index); + var two = builder.addFunction("two", kSig_v_i); + var three = builder.addFunction("three", sig_three).addBody([]); + + zero.addBody([kExprLocalGet, 0, kExprI32LoadMem, 0, 0]); + + one.addBody([ + kExprLocalGet, 7, + kExprCallFunction, zero.index]); + + two.addBody([ + kExprI64Const, 0x81, 0x80, 0x80, 0x80, 0x10, + kExprI64Const, 0x81, 0x80, 0x80, 0x80, 0x10, + kExprI64Const, 0x81, 0x80, 0x80, 0x80, 0x10, + kExprI64Const, 0x81, 0x80, 0x80, 0x80, 0x10, + kExprI64Const, 0x81, 0x80, 0x80, 0x80, 0x10, + kExprI64Const, 0x81, 0x80, 0x80, 0x80, 0x10, + kExprI64Const, 0x81, 0x80, 0x80, 0x80, 0x10, + kExprI64Const, 0x81, 0x80, 0x80, 0x80, 0x10, + kExprCallFunction, three.index, + kExprI32Const, 0, + kExprI32Const, 0, + kExprI32Const, 0, + kExprI32Const, 0, + kExprI32Const, 0, + kExprI32Const, 0, + 
kExprI32Const, 0, + kExprI32Const, 0, + kExprCallFunction, one.index, + kExprDrop, + ]).exportFunc(); + + return builder.instantiate({}); +})(); + +instance.exports.two() From 2657c305cb6d422c487cfab446a8c27edb71415a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Sat, 10 Jul 2021 08:23:33 +0200 Subject: [PATCH 121/133] deps: V8: cherry-pick 56fe020eec0c Original commit message: [wasm][arm64] Always zero-extend 32 bit offsets, for realz We've already been zero-extending 32-bit offset registers since https://chromium-review.googlesource.com/c/v8/v8/+/2917612, but that patch only covered the case where offset_imm == 0. When there is a non-zero offset, we need the same fix. Bug: chromium:1224882,v8:11809 Change-Id: I1908f735929798f411346807fc4f3c79d8e04362 Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2998582 Commit-Queue: Jakob Kummerow Reviewed-by: Clemens Backes Cr-Commit-Position: refs/heads/master@{#75500} Refs: https://github.com/v8/v8/commit/56fe020eec0c35e9816590114b1d80836a504156 Fixes: https://github.com/nodejs/node/issues/39327 PR-URL: https://github.com/nodejs/node/pull/39337 Reviewed-By: Matteo Collina Reviewed-By: James M Snell --- common.gypi | 2 +- .../baseline/arm64/liftoff-assembler-arm64.h | 12 +++++++++--- .../test/mjsunit/regress/wasm/regress-11809.js | 16 +++++++++++----- 3 files changed, 21 insertions(+), 9 deletions(-) diff --git a/common.gypi b/common.gypi index ee91fb1df6d913..36e5de56341a51 100644 --- a/common.gypi +++ b/common.gypi @@ -36,7 +36,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. 
- 'v8_embedder_string': '-node.16', + 'v8_embedder_string': '-node.17', ##### V8 defaults for Node.js ##### diff --git a/deps/v8/src/wasm/baseline/arm64/liftoff-assembler-arm64.h b/deps/v8/src/wasm/baseline/arm64/liftoff-assembler-arm64.h index 549bbe7f10a9b8..bea5100ef3e9f8 100644 --- a/deps/v8/src/wasm/baseline/arm64/liftoff-assembler-arm64.h +++ b/deps/v8/src/wasm/baseline/arm64/liftoff-assembler-arm64.h @@ -133,10 +133,16 @@ inline MemOperand GetMemOp(LiftoffAssembler* assm, return i64_offset ? MemOperand(addr.X(), offset.X()) : MemOperand(addr.X(), offset.W(), UXTW); } - Register tmp = temps->AcquireX(); DCHECK_GE(kMaxUInt32, offset_imm); - assm->Add(tmp, offset.X(), offset_imm); - return MemOperand(addr.X(), tmp); + if (i64_offset) { + Register tmp = temps->AcquireX(); + assm->Add(tmp, offset.X(), offset_imm); + return MemOperand(addr.X(), tmp); + } else { + Register tmp = temps->AcquireW(); + assm->Add(tmp, offset.W(), offset_imm); + return MemOperand(addr.X(), tmp, UXTW); + } } return MemOperand(addr.X(), offset_imm); } diff --git a/deps/v8/test/mjsunit/regress/wasm/regress-11809.js b/deps/v8/test/mjsunit/regress/wasm/regress-11809.js index 890e26c609e151..eef8c291f6e6db 100644 --- a/deps/v8/test/mjsunit/regress/wasm/regress-11809.js +++ b/deps/v8/test/mjsunit/regress/wasm/regress-11809.js @@ -2,11 +2,12 @@ // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. 
// -// Flags: --enable-testing-opcode-in-wasm --nowasm-tier-up --wasm-tier-mask-for-testing=2 +// Flags: --enable-testing-opcode-in-wasm --nowasm-tier-up +// Flags: --wasm-tier-mask-for-testing=2 load("test/mjsunit/wasm/wasm-module-builder.js"); -var instance = (function () { +function InstanceMaker(offset) { var builder = new WasmModuleBuilder(); builder.addMemory(1, 1, false /* exported */); @@ -24,7 +25,7 @@ var instance = (function () { var two = builder.addFunction("two", kSig_v_i); var three = builder.addFunction("three", sig_three).addBody([]); - zero.addBody([kExprLocalGet, 0, kExprI32LoadMem, 0, 0]); + zero.addBody([kExprLocalGet, 0, kExprI32LoadMem, 0, offset]); one.addBody([ kExprLocalGet, 7, @@ -53,6 +54,11 @@ var instance = (function () { ]).exportFunc(); return builder.instantiate({}); -})(); +} -instance.exports.two() +var instance = InstanceMaker(0); +instance.exports.two(); + +// Regression test for crbug.com/1224882. +var instance_with_offset = InstanceMaker(4); +instance_with_offset.exports.two(); From 142ce6838beda039ca118650b2a7eb0d3714c9d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Sat, 10 Jul 2021 13:10:39 +0200 Subject: [PATCH 122/133] deps: V8: cherry-pick 3805a698f7b6 Original commit message: PPC/s390: [wasm][liftoff] Always zero-extend 32 bit offsets Port 2b77ca200c56667c68895e49c96c10ff77834f09 Original Commit Message: The upper 32 bits of the 64 bit offset register are not guaranteed to be cleared, so a zero-extension is needed. We already do the zero-extension in the case of explicit bounds checking, but this should also be done if the trap handler is enabled. 
R=thibaudm@chromium.org, joransiu@ca.ibm.com, junyan@redhat.com, midawson@redhat.com BUG= LOG=N Change-Id: Ife3ae4f93b85fe1b2c76fe4b98fa408b5b51ed71 Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2929661 Reviewed-by: Junliang Yan Commit-Queue: Milad Fa Cr-Commit-Position: refs/heads/master@{#74886} Refs: https://github.com/v8/v8/commit/3805a698f7b6803dd6ee002cfdda71296c71b30b PR-URL: https://github.com/nodejs/node/pull/39337 Reviewed-By: Matteo Collina Reviewed-By: James M Snell --- common.gypi | 2 +- deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h | 3 ++- deps/v8/src/wasm/baseline/s390/liftoff-assembler-s390.h | 8 +++++++- 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/common.gypi b/common.gypi index 36e5de56341a51..88764c8f6b75a8 100644 --- a/common.gypi +++ b/common.gypi @@ -36,7 +36,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. - 'v8_embedder_string': '-node.17', + 'v8_embedder_string': '-node.18', ##### V8 defaults for Node.js ##### diff --git a/deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h b/deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h index 4e99821a27d563..bedee1a939c007 100644 --- a/deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h +++ b/deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h @@ -137,7 +137,8 @@ void LiftoffAssembler::StoreTaggedPointer(Register dst_addr, void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, LiftoffRegList pinned, - uint32_t* protected_load_pc, bool is_load_mem) { + uint32_t* protected_load_pc, bool is_load_mem, + bool i64_offset) { bailout(kUnsupportedArchitecture, "Load"); } diff --git a/deps/v8/src/wasm/baseline/s390/liftoff-assembler-s390.h b/deps/v8/src/wasm/baseline/s390/liftoff-assembler-s390.h index 8560c91553f8cc..04f30939fdbab6 100644 --- a/deps/v8/src/wasm/baseline/s390/liftoff-assembler-s390.h +++ 
b/deps/v8/src/wasm/baseline/s390/liftoff-assembler-s390.h @@ -277,11 +277,17 @@ void LiftoffAssembler::StoreTaggedPointer(Register dst_addr, void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, LiftoffRegList pinned, - uint32_t* protected_load_pc, bool is_load_mem) { + uint32_t* protected_load_pc, bool is_load_mem, + bool i64_offset) { UseScratchRegisterScope temps(this); if (!is_int20(offset_imm)) { mov(ip, Operand(offset_imm)); if (offset_reg != no_reg) { + if (!i64_offset) { + // Clear the upper 32 bits of the 64 bit offset register. + llgfr(r0, offset_reg); + offset_reg = r0; + } AddS64(ip, offset_reg); } offset_reg = ip; From 0e64bd0dd6635e8bc6218f4d971eeb3053b6fba2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Sat, 10 Jul 2021 13:11:11 +0200 Subject: [PATCH 123/133] deps: V8: cherry-pick 359d44df4cdd Original commit message: [riscv64] Fix build failed Port 2b77ca200c56667c68895e49c96c10ff77834f09 Change-Id: Ie953a1d54f5529423ae35d1b1cd3ca25e8101c6e Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2931577 Auto-Submit: Yahan Lu Commit-Queue: Brice Dobry Reviewed-by: Brice Dobry Cr-Commit-Position: refs/heads/master@{#74937} Refs: https://github.com/v8/v8/commit/359d44df4cdd9cdb40cb683b47c2bea6bb7609f6 PR-URL: https://github.com/nodejs/node/pull/39337 Reviewed-By: Matteo Collina Reviewed-By: James M Snell --- common.gypi | 2 +- deps/v8/src/wasm/baseline/riscv64/liftoff-assembler-riscv64.h | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/common.gypi b/common.gypi index 88764c8f6b75a8..d4f1d425f83958 100644 --- a/common.gypi +++ b/common.gypi @@ -36,7 +36,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. 
- 'v8_embedder_string': '-node.18', + 'v8_embedder_string': '-node.19', ##### V8 defaults for Node.js ##### diff --git a/deps/v8/src/wasm/baseline/riscv64/liftoff-assembler-riscv64.h b/deps/v8/src/wasm/baseline/riscv64/liftoff-assembler-riscv64.h index 47f8ce2125d439..bb6c3bcad886a1 100644 --- a/deps/v8/src/wasm/baseline/riscv64/liftoff-assembler-riscv64.h +++ b/deps/v8/src/wasm/baseline/riscv64/liftoff-assembler-riscv64.h @@ -446,7 +446,8 @@ void LiftoffAssembler::StoreTaggedPointer(Register dst_addr, void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, LiftoffRegList pinned, - uint32_t* protected_load_pc, bool is_load_mem) { + uint32_t* protected_load_pc, bool is_load_mem, + bool i64_offset) { MemOperand src_op = liftoff::GetMemOp(this, src_addr, offset_reg, offset_imm); if (protected_load_pc) *protected_load_pc = pc_offset(); From 4507714f9daa73dd8c355f5f9adf4096441a98d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C3=ABl=20Zasso?= Date: Sat, 10 Jul 2021 13:15:42 +0200 Subject: [PATCH 124/133] deps: V8: backport 5c76da8ddcf8 Original commit message: [mips][wasm][liftoff] Fix compile failed Port 2b77ca200c56667c68895e49c96c10ff77834f09 Bug: v8:11809 Change-Id: Idbbbc10d1339d6c8463686b6e701fb601a217cab Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/2931557 Reviewed-by: Zhao Jiazhong Commit-Queue: Zhao Jiazhong Auto-Submit: Liu yu Cr-Commit-Position: refs/heads/master@{#74934} Refs: https://github.com/v8/v8/commit/5c76da8ddcf89297a8dc2606b68da97d7a5329cb PR-URL: https://github.com/nodejs/node/pull/39337 Reviewed-By: Matteo Collina Reviewed-By: James M Snell --- common.gypi | 2 +- deps/v8/src/wasm/baseline/mips/liftoff-assembler-mips.h | 3 ++- deps/v8/src/wasm/baseline/mips64/liftoff-assembler-mips64.h | 3 ++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/common.gypi b/common.gypi index d4f1d425f83958..71862791dae3be 100644 --- a/common.gypi +++ b/common.gypi 
@@ -36,7 +36,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. - 'v8_embedder_string': '-node.19', + 'v8_embedder_string': '-node.20', ##### V8 defaults for Node.js ##### diff --git a/deps/v8/src/wasm/baseline/mips/liftoff-assembler-mips.h b/deps/v8/src/wasm/baseline/mips/liftoff-assembler-mips.h index ca715a8a328114..d078fd5e429429 100644 --- a/deps/v8/src/wasm/baseline/mips/liftoff-assembler-mips.h +++ b/deps/v8/src/wasm/baseline/mips/liftoff-assembler-mips.h @@ -491,7 +491,8 @@ void LiftoffAssembler::StoreTaggedPointer(Register dst_addr, void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr, Register offset_reg, uint32_t offset_imm, LoadType type, LiftoffRegList pinned, - uint32_t* protected_load_pc, bool is_load_mem) { + uint32_t* protected_load_pc, bool is_load_mem, + bool i64_offset) { Register src = no_reg; if (offset_reg != no_reg) { src = GetUnusedRegister(kGpReg, pinned).gp(); diff --git a/deps/v8/src/wasm/baseline/mips64/liftoff-assembler-mips64.h b/deps/v8/src/wasm/baseline/mips64/liftoff-assembler-mips64.h index a5a9f8ce231b46..dfbd8d6a752ee1 100644 --- a/deps/v8/src/wasm/baseline/mips64/liftoff-assembler-mips64.h +++ b/deps/v8/src/wasm/baseline/mips64/liftoff-assembler-mips64.h @@ -470,7 +470,8 @@ void LiftoffAssembler::StoreTaggedPointer(Register dst_addr, void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr, Register offset_reg, uintptr_t offset_imm, LoadType type, LiftoffRegList pinned, - uint32_t* protected_load_pc, bool is_load_mem) { + uint32_t* protected_load_pc, bool is_load_mem, + bool i64_offset) { MemOperand src_op = liftoff::GetMemOp(this, src_addr, offset_reg, offset_imm); if (protected_load_pc) *protected_load_pc = pc_offset(); From 630266cba2022a7a6be7bd31b3094614a08b58f9 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Fri, 9 Jul 2021 18:19:41 -0700 Subject: [PATCH 125/133] debugger: indicate server is ending MIME-Version: 1.0 Content-Type: 
text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Currently, we say "listening" when we are ending the server. Change it to "ending". Fixes: https://github.com/nodejs/node/issues/39272 PR-URL: https://github.com/nodejs/node/pull/39334 Reviewed-By: Michaël Zasso Reviewed-By: Yash Ladha --- src/inspector_socket_server.cc | 6 ++++- .../test-debugger-restart-message.js | 24 +++---------------- 2 files changed, 8 insertions(+), 22 deletions(-) rename test/{known_issues => sequential}/test-debugger-restart-message.js (50%) diff --git a/src/inspector_socket_server.cc b/src/inspector_socket_server.cc index 29e0c128026ed0..299664da9a1693 100644 --- a/src/inspector_socket_server.cc +++ b/src/inspector_socket_server.cc @@ -234,6 +234,7 @@ void PrintDebuggerReadyMessage( const std::string& host, const std::vector& server_sockets, const std::vector& ids, + const char* verb, bool publish_uid_stderr, FILE* out) { if (!publish_uid_stderr || out == nullptr) { @@ -241,7 +242,8 @@ void PrintDebuggerReadyMessage( } for (const auto& server_socket : server_sockets) { for (const std::string& id : ids) { - fprintf(out, "Debugger listening on %s\n", + fprintf(out, "Debugger %s on %s\n", + verb, FormatWsAddress(host, server_socket->port(), id, true).c_str()); } } @@ -300,6 +302,7 @@ void InspectorSocketServer::SessionTerminated(int session_id) { PrintDebuggerReadyMessage(host_, server_sockets_, delegate_->GetTargetIds(), + "ending", inspect_publish_uid_.console, out_); } @@ -425,6 +428,7 @@ bool InspectorSocketServer::Start() { PrintDebuggerReadyMessage(host_, server_sockets_, delegate_->GetTargetIds(), + "listening", inspect_publish_uid_.console, out_); return true; diff --git a/test/known_issues/test-debugger-restart-message.js b/test/sequential/test-debugger-restart-message.js similarity index 50% rename from test/known_issues/test-debugger-restart-message.js rename to test/sequential/test-debugger-restart-message.js index 478806effbb39c..bcd06b4e230131 100644 --- 
a/test/known_issues/test-debugger-restart-message.js +++ b/test/sequential/test-debugger-restart-message.js @@ -1,30 +1,12 @@ 'use strict'; -// Refs: https://github.com/nodejs/node/issues/39272 - const common = require('../common'); -const assert = require('assert'); - -// When this is moved out of known_issues, this skip can be removed. -if (common.isOSX) { - assert.fail('does not fail reliably on macOS in CI'); -} +common.skipIfInspectorDisabled(); -// When this is moved out of known_issues, this can be removed and replaced with -// the commented-out use of common.skipIfInspectorDisabled() below. -if (!process.features.inspector) { - assert.fail('Known issues test should fail, so if the inspector is disabled'); -} - -// Will need to uncomment this when moved out of known_issues. -// common.skipIfInspectorDisabled(); +const assert = require('assert'); -// This can be reduced to 2 or even 1 (and the loop removed) once the debugger -// is fixed. It's set higher to make sure that the error is tripped reliably -// in CI. On most systems, the error will be tripped on the first test, but -// on a few platforms in CI, it needs to be many times. 
-const RESTARTS = 16; +const RESTARTS = 10; const fixtures = require('../common/fixtures'); const startCLI = require('../common/debugger'); From 2573bf511638b6328a8949707acb4b2238efd5c6 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Mon, 5 Jul 2021 09:28:21 -0700 Subject: [PATCH 126/133] doc: update AUTHORS MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/39277 Reviewed-By: Michaël Zasso --- AUTHORS | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS b/AUTHORS index 59b9fab48ac60c..6d9af83ca23a64 100644 --- a/AUTHORS +++ b/AUTHORS @@ -3319,5 +3319,6 @@ Houssem Chebab Davidson Francis Rohan Sharma AkshayK +FrankEntriken <42781627+FrankEntriken@users.noreply.github.com> # Generated by tools/update-authors.js From 64a185e595cdd98b082d25e269c84a70be481c11 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sat, 10 Jul 2021 10:33:54 -0700 Subject: [PATCH 127/133] doc: use consistent abbreviation formatting MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Refs: https://docs.microsoft.com/en-us/style-guide/a-z-word-list-term-collections/term-collections/bits-bytes-terms PR-URL: https://github.com/nodejs/node/pull/39343 Reviewed-By: James M Snell Reviewed-By: Tobias Nießen Reviewed-By: Darshan Sen --- doc/api/buffer.md | 6 +++--- doc/api/cli.md | 8 ++++---- doc/api/errors.md | 4 ++-- doc/api/fs.md | 4 ++-- doc/api/http.md | 7 ++++--- doc/api/stream.md | 6 +++--- doc/guides/investigating_native_memory_leak.md | 4 ++-- doc/node.1 | 2 +- 8 files changed, 21 insertions(+), 20 deletions(-) diff --git a/doc/api/buffer.md b/doc/api/buffer.md index d762b4b0e3ae91..b9a83959ccab83 100644 --- a/doc/api/buffer.md +++ b/doc/api/buffer.md @@ -785,7 +785,7 @@ initialized*. The contents of the newly created `Buffer` are unknown and such `Buffer` instances with zeroes. 
When using [`Buffer.allocUnsafe()`][] to allocate new `Buffer` instances, -allocations under 4KB are sliced from a single pre-allocated `Buffer`. This +allocations under 4 KB are sliced from a single pre-allocated `Buffer`. This allows applications to avoid the garbage collection overhead of creating many individually allocated `Buffer` instances. This approach improves both performance and memory usage by eliminating the need to track and clean up as @@ -5027,9 +5027,9 @@ changes: * {integer} The largest size allowed for a single `Buffer` instance. -On 32-bit architectures, this value currently is 230 - 1 (~1GB). +On 32-bit architectures, this value currently is 230 - 1 (~1 GB). -On 64-bit architectures, this value currently is 232 (~4GB). +On 64-bit architectures, this value currently is 232 (~4 GB). It reflects [`v8::TypedArray::kMaxLength`][] under the hood. diff --git a/doc/api/cli.md b/doc/api/cli.md index d4939e2716c314..8c462bbd6e2c13 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -584,10 +584,10 @@ added: changes: - version: v13.13.0 pr-url: https://github.com/nodejs/node/pull/32520 - description: Change maximum default size of HTTP headers from 8KB to 16KB. + description: Change maximum default size of HTTP headers from 8 KB to 16 KB. --> -Specify the maximum size, in bytes, of HTTP headers. Defaults to 16KB. +Specify the maximum size, in bytes, of HTTP headers. Defaults to 16 KB. ### `--napi-modules` Too much HTTP header data was received. In order to protect against malicious or -malconfigured clients, if more than 8KB of HTTP header data is received then +malconfigured clients, if more than 8 KB of HTTP header data is received then HTTP parsing will abort without a request or response object being created, and an `Error` with this code will be emitted. diff --git a/doc/api/fs.md b/doc/api/fs.md index 222686c555070e..feba68ddc6c108 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -2962,8 +2962,8 @@ to read a complete file into memory. 
The additional read overhead can vary broadly on different systems and depends on the type of file being read. If the file type is not a regular file (a pipe for instance) and Node.js is unable to determine an actual file size, each read -operation will load on 64kb of data. For regular files, each read will process -512kb of data. +operation will load on 64 KB of data. For regular files, each read will process +512 KB of data. For applications that require as-fast-as-possible reading of file contents, it is better to use `fs.read()` directly and for application code to manage diff --git a/doc/api/http.md b/doc/api/http.md index e3d2b08deae3c8..fdb77648b9c56e 100644 --- a/doc/api/http.md +++ b/doc/api/http.md @@ -2677,7 +2677,7 @@ changes: * `maxHeaderSize` {number} Optionally overrides the value of [`--max-http-header-size`][] for requests received by this server, i.e. the maximum length of request headers in bytes. - **Default:** 16384 (16KB). + **Default:** 16384 (16 KB). * `requestListener` {Function} * Returns: {http.Server} @@ -2787,7 +2787,8 @@ added: * {number} Read-only property specifying the maximum allowed size of HTTP headers in bytes. -Defaults to 8KB. Configurable using the [`--max-http-header-size`][] CLI option. +Defaults to 8 KB. Configurable using the [`--max-http-header-size`][] CLI +option. This can be overridden for servers and client requests by passing the `maxHeaderSize` option. @@ -2853,7 +2854,7 @@ changes: * `maxHeaderSize` {number} Optionally overrides the value of [`--max-http-header-size`][] for requests received from the server, i.e. the maximum length of response headers in bytes. - **Default:** 16384 (16KB). + **Default:** 16384 (16 KB). * `method` {string} A string specifying the HTTP request method. **Default:** `'GET'`. * `path` {string} Request path. Should include query string if any. 
diff --git a/doc/api/stream.md b/doc/api/stream.md index 289890840616ca..8ff36423cb246e 100644 --- a/doc/api/stream.md +++ b/doc/api/stream.md @@ -1509,7 +1509,7 @@ If the loop terminates with a `break`, `return`, or a `throw`, the stream will be destroyed. In other terms, iterating over a stream will consume the stream fully. The stream will be read in chunks of size equal to the `highWaterMark` option. In the code example above, data will be in a single chunk if the file -has less then 64KB of data because no `highWaterMark` option is provided to +has less then 64 KB of data because no `highWaterMark` option is provided to [`fs.createReadStream()`][]. ##### `readable.iterator([options])` @@ -2053,7 +2053,7 @@ changes: * `options` {Object} * `highWaterMark` {number} Buffer level when [`stream.write()`][stream-write] starts returning `false`. **Default:** - `16384` (16KB), or `16` for `objectMode` streams. + `16384` (16 KB), or `16` for `objectMode` streams. * `decodeStrings` {boolean} Whether to encode `string`s passed to [`stream.write()`][stream-write] to `Buffer`s (with the encoding specified in the [`stream.write()`][stream-write] call) before passing @@ -2416,7 +2416,7 @@ changes: * `options` {Object} * `highWaterMark` {number} The maximum [number of bytes][hwm-gotcha] to store in the internal buffer before ceasing to read from the underlying resource. - **Default:** `16384` (16KB), or `16` for `objectMode` streams. + **Default:** `16384` (16 KB), or `16` for `objectMode` streams. * `encoding` {string} If specified, then buffers will be decoded to strings using the specified encoding. **Default:** `null`. 
* `objectMode` {boolean} Whether this stream should behave diff --git a/doc/guides/investigating_native_memory_leak.md b/doc/guides/investigating_native_memory_leak.md index 55ba1b1ec3d60b..3c5664a77812c1 100644 --- a/doc/guides/investigating_native_memory_leak.md +++ b/doc/guides/investigating_native_memory_leak.md @@ -92,7 +92,7 @@ operating systems will clean up the memory of the process after the shutdown while attempting to free all memory to get a clean report may have a negative impact on the code complexity and shutdown times. Node.js does a pretty good job only leaving on -the order of 6KB that are not freed on shutdown. +the order of 6 KB that are not freed on shutdown. ## An obvious memory leak @@ -100,7 +100,7 @@ Leaks can be introduced in native addons and the following is a simple example leak based on the "Hello world" addon from [node-addon-examples](https://github.com/nodejs/node-addon-examples). -In this example, a loop which allocates ~1MB of memory and never frees it +In this example, a loop which allocates ~1 MB of memory and never frees it has been added: ```cpp diff --git a/doc/node.1 b/doc/node.1 index a0bbb2b781b7ba..58c76663c2f42f 100644 --- a/doc/node.1 +++ b/doc/node.1 @@ -266,7 +266,7 @@ This flag is inherited from V8 and is subject to change upstream. It may disappear in a non-semver-major release. . .It Fl -max-http-header-size Ns = Ns Ar size -Specify the maximum size of HTTP headers in bytes. Defaults to 16KB. +Specify the maximum size of HTTP headers in bytes. Defaults to 16 KB. . .It Fl -napi-modules This option is a no-op. 
From 0b3b2695bc28ef4ca65649b7750da64211de01b6 Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sat, 10 Jul 2021 11:50:40 -0700 Subject: [PATCH 128/133] doc: do not use tilde for "about" or "approximately" MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Refs: https://docs.microsoft.com/en-us/style-guide/a-z-word-list-term-collections/term-collections/special-characters PR-URL: https://github.com/nodejs/node/pull/39344 Reviewed-By: Luigi Pinca Reviewed-By: James M Snell Reviewed-By: Tobias Nießen Reviewed-By: Darshan Sen --- doc/api/buffer.md | 5 +++-- doc/guides/investigating_native_memory_leak.md | 4 ++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/doc/api/buffer.md b/doc/api/buffer.md index b9a83959ccab83..a8a7635dbdc748 100644 --- a/doc/api/buffer.md +++ b/doc/api/buffer.md @@ -5027,9 +5027,10 @@ changes: * {integer} The largest size allowed for a single `Buffer` instance. -On 32-bit architectures, this value currently is 230 - 1 (~1 GB). +On 32-bit architectures, this value currently is 230 - 1 (about 1 +GB). -On 64-bit architectures, this value currently is 232 (~4 GB). +On 64-bit architectures, this value currently is 232 (about 4 GB). It reflects [`v8::TypedArray::kMaxLength`][] under the hood. diff --git a/doc/guides/investigating_native_memory_leak.md b/doc/guides/investigating_native_memory_leak.md index 3c5664a77812c1..f808675f5328bc 100644 --- a/doc/guides/investigating_native_memory_leak.md +++ b/doc/guides/investigating_native_memory_leak.md @@ -100,8 +100,8 @@ Leaks can be introduced in native addons and the following is a simple example leak based on the "Hello world" addon from [node-addon-examples](https://github.com/nodejs/node-addon-examples). 
-In this example, a loop which allocates ~1 MB of memory and never frees it -has been added: +In this example, a loop which allocates approximately 1 MB of memory and never +frees it has been added: ```cpp void* malloc_holder = nullptr; From 29c9cc8f0324db5d29a7f667ad079729a5acd32f Mon Sep 17 00:00:00 2001 From: Rich Trott Date: Sat, 10 Jul 2021 11:57:57 -0700 Subject: [PATCH 129/133] doc: do not use & for "and" in text MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This also changes a line in the man page to be sentence case. Refs: https://docs.microsoft.com/en-us/style-guide/a-z-word-list-term-collections/term-collections/special-characters PR-URL: https://github.com/nodejs/node/pull/39345 Reviewed-By: Luigi Pinca Reviewed-By: James M Snell Reviewed-By: Tobias Nießen Reviewed-By: Darshan Sen Reviewed-By: Michaël Zasso --- doc/guides/writing-tests.md | 2 +- doc/node.1 | 2 +- onboarding.md | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/doc/guides/writing-tests.md b/doc/guides/writing-tests.md index 7d33ee15858cc5..1972c897b0a5eb 100644 --- a/doc/guides/writing-tests.md +++ b/doc/guides/writing-tests.md @@ -20,7 +20,7 @@ Add tests when: ## Test directory structure -See [directory structure overview][] for outline of existing test & locations. +See [directory structure overview][] for outline of existing test and locations. When deciding on whether to expand an existing test file or create a new one, consider going through the files related to the subsystem. For example, look for `test-streams` when writing a test for `lib/streams.js`. diff --git a/doc/node.1 b/doc/node.1 index 58c76663c2f42f..2cfc4dbda91d07 100644 --- a/doc/node.1 +++ b/doc/node.1 @@ -698,7 +698,7 @@ Documentation: .Sy https://nodejs.org/api/ . .Pp -GitHub repository & Issue Tracker: +GitHub repository and issue tracker: .Sy https://github.com/nodejs/node . 
.Pp diff --git a/onboarding.md b/onboarding.md index fddc6aa64f16b3..938d683ee50e4a 100644 --- a/onboarding.md +++ b/onboarding.md @@ -26,7 +26,7 @@ onboarding session. * This session will cover: * [local setup](#local-setup) - * [project goals & values](#project-goals--values) + * [project goals and values](#project-goals-and-values) * [managing the issue tracker](#managing-the-issue-tracker) * [reviewing PRs](#reviewing-prs) * [landing PRs](#landing-prs) @@ -60,7 +60,7 @@ The project has two venues for real-time discussion: * [`#nodejs-dev`](https://openjs-foundation.slack.com/archives/C019Y2T6STH) on the [OpenJS Foundation](https://slack-invite.openjsf.org/) -## Project goals & values +## Project goals and values * Collaborators are the collective owners of the project * The project has the goals of its contributors From fb6616ecbb498592fd3e58f9b343500a18290bfe Mon Sep 17 00:00:00 2001 From: Danielle Adams Date: Mon, 28 Jun 2021 12:58:22 -0400 Subject: [PATCH 130/133] doc: add text about moving long commit lists out of PR description MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/39186 Reviewed-By: Rich Trott Reviewed-By: James M Snell Reviewed-By: Michaël Zasso Reviewed-By: Beth Griggs Reviewed-By: Michael Dawson --- doc/guides/releases.md | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/doc/guides/releases.md b/doc/guides/releases.md index 5087f4e0bb69be..8a357b595a8c89 100644 --- a/doc/guides/releases.md +++ b/doc/guides/releases.md @@ -398,7 +398,19 @@ Create a pull request targeting the correct release line. For example, a `v5.3.0-proposal` PR should target `v5.x`, not master. Paste the CHANGELOG modifications into the body of the PR so that collaborators can see what is changing. These PRs should be left open for at least 24 hours, and can be -updated as new commits land. +updated as new commits land. 
If the CHANGELOG pasted into the pull request +is long enough that it slows down the GitHub UI, consider pasting the commits +into `<details>
` tags or in follow up comments. + +If using the `<details>
` tag, use the following format: + +```markdown +<details>
+<summary>Commits</summary> + +* Full list of commits... +</details>
    +``` If you need any additional information about any of the commits, this PR is a good place to @-mention the relevant contributors. From ecf627a9af90b0a4ae125f076391c9040dd6e2e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Sun, 11 Jul 2021 17:59:13 +0200 Subject: [PATCH 131/133] lib: rename TransferedReadableStream etc MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/39352 Reviewed-By: Michaël Zasso Reviewed-By: Robert Nagy Reviewed-By: Luigi Pinca Reviewed-By: James M Snell Reviewed-By: Colin Ihrig --- lib/internal/webstreams/readablestream.js | 34 +++++++++++----------- lib/internal/webstreams/transfer.js | 4 +-- lib/internal/webstreams/transformstream.js | 8 ++--- lib/internal/webstreams/util.js | 2 +- lib/internal/webstreams/writablestream.js | 8 ++--- 5 files changed, 28 insertions(+), 28 deletions(-) diff --git a/lib/internal/webstreams/readablestream.js b/lib/internal/webstreams/readablestream.js index a8024c64af2353..978281dfa324e8 100644 --- a/lib/internal/webstreams/readablestream.js +++ b/lib/internal/webstreams/readablestream.js @@ -461,7 +461,7 @@ class ReadableStream { // Web Platform Tests that check for use of a modified // Promise.prototype.then. Since the await keyword // uses Promise.prototype.then, it is open to prototype - // polution, which causes the test to fail. The other + // pollution, which causes the test to fail. The other // await uses here do not trigger that failure because // the test that fails does not trigger those code paths. 
next() { @@ -526,7 +526,7 @@ class ReadableStream { return { data: { port: this[kState].transfer.port2 }, deserializeInfo: - 'internal/webstreams/readablestream:TransferedReadableStream' + 'internal/webstreams/readablestream:TransferredReadableStream' }; } @@ -561,7 +561,7 @@ ObjectDefineProperties(ReadableStream.prototype, { tee: { enumerable: true }, }); -function TransferedReadableStream() { +function TransferredReadableStream() { return makeTransferable(ReflectConstruct( function() { this[kType] = 'ReadableStream'; @@ -579,7 +579,7 @@ function TransferedReadableStream() { }, [], ReadableStream)); } -TransferedReadableStream.prototype[kDeserialize] = () => {}; +TransferredReadableStream.prototype[kDeserialize] = () => {}; class ReadableStreamBYOBRequest { [kType] = 'ReadableStreamBYOBRequest'; @@ -1478,8 +1478,8 @@ function readableByteStreamControllerConvertPullIntoDescriptor(desc) { if (bytesFilled > byteLength) throw new ERR_INVALID_STATE.RangeError('The buffer size is invalid'); assert(!(bytesFilled % elementSize)); - const transferedBuffer = transferArrayBuffer(buffer); - return new ctor(transferedBuffer, byteOffset, bytesFilled / elementSize); + const transferredBuffer = transferArrayBuffer(buffer); + return new ctor(transferredBuffer, byteOffset, bytesFilled / elementSize); } function isReadableStreamLocked(stream) { @@ -2100,15 +2100,15 @@ function readableByteStreamControllerPullInto( const byteLength = ArrayBufferViewGetByteLength(view); const bufferByteLength = ArrayBufferGetByteLength(buffer); - let transferedBuffer; + let transferredBuffer; try { - transferedBuffer = transferArrayBuffer(buffer); + transferredBuffer = transferArrayBuffer(buffer); } catch (error) { readIntoRequest[kError](error); return; } const desc = { - buffer: transferedBuffer, + buffer: transferredBuffer, bufferByteLength, byteOffset, byteLength, @@ -2241,7 +2241,7 @@ function readableByteStreamControllerEnqueue( if (closeRequested || stream[kState].state !== 'readable') return; 
- const transferedBuffer = transferArrayBuffer(buffer); + const transferredBuffer = transferArrayBuffer(buffer); if (pendingPullIntos.length) { const firstPendingPullInto = pendingPullIntos[0]; @@ -2263,19 +2263,19 @@ function readableByteStreamControllerEnqueue( if (!readableStreamGetNumReadRequests(stream)) { readableByteStreamControllerEnqueueChunkToQueue( controller, - transferedBuffer, + transferredBuffer, byteOffset, byteLength); } else { assert(!queue.length); - const transferedView = - new Uint8Array(transferedBuffer, byteOffset, byteLength); - readableStreamFulfillReadRequest(stream, transferedView, false); + const transferredView = + new Uint8Array(transferredBuffer, byteOffset, byteLength); + readableStreamFulfillReadRequest(stream, transferredView, false); } } else if (readableStreamHasBYOBReader(stream)) { readableByteStreamControllerEnqueueChunkToQueue( controller, - transferedBuffer, + transferredBuffer, byteOffset, byteLength); readableByteStreamControllerProcessPullIntoDescriptorsUsingQueue( @@ -2284,7 +2284,7 @@ function readableByteStreamControllerEnqueue( assert(!isReadableStreamLocked(stream)); readableByteStreamControllerEnqueueChunkToQueue( controller, - transferedBuffer, + transferredBuffer, byteOffset, byteLength); } @@ -2663,7 +2663,7 @@ module.exports = { ReadableStreamBYOBRequest, ReadableByteStreamController, ReadableStreamDefaultController, - TransferedReadableStream, + TransferredReadableStream, // Exported Brand Checks isReadableStream, diff --git a/lib/internal/webstreams/transfer.js b/lib/internal/webstreams/transfer.js index 72cdc36a153564..985d7e86738f35 100644 --- a/lib/internal/webstreams/transfer.js +++ b/lib/internal/webstreams/transfer.js @@ -134,7 +134,7 @@ class CrossRealmTransformReadableSource { port.onmessageerror = () => { const error = new CloneableDOMException( - 'Internal transfered ReadableStream error', + 'Internal transferred ReadableStream error', 'DataCloneError'); port.postMessage({ type: 'error', value: error 
}); readableStreamDefaultControllerError( @@ -201,7 +201,7 @@ class CrossRealmTransformWritableSink { }; port.onmessageerror = () => { const error = new CloneableDOMException( - 'Internal transfered ReadableStream error', + 'Internal transferred ReadableStream error', 'DataCloneError'); port.postMessage({ type: 'error', value: error }); writableStreamDefaultControllerErrorIfNeeded( diff --git a/lib/internal/webstreams/transformstream.js b/lib/internal/webstreams/transformstream.js index 745675266f7f1b..62940a1fb9da76 100644 --- a/lib/internal/webstreams/transformstream.js +++ b/lib/internal/webstreams/transformstream.js @@ -211,7 +211,7 @@ class TransformStream { writable, }, deserializeInfo: - 'internal/webstreams/transformstream:TransferedTransformStream' + 'internal/webstreams/transformstream:TransferredTransformStream' }; } @@ -230,7 +230,7 @@ ObjectDefineProperties(TransformStream.prototype, { writable: { enumerable: true }, }); -function TransferedTransformStream() { +function TransferredTransformStream() { return makeTransferable(ReflectConstruct( function() { this[kType] = 'TransformStream'; @@ -248,7 +248,7 @@ function TransferedTransformStream() { }, [], TransformStream)); } -TransferedTransformStream.prototype[kDeserialize] = () => {}; +TransferredTransformStream.prototype[kDeserialize] = () => {}; class TransformStreamDefaultController { [kType] = 'TransformStreamDefaultController'; @@ -583,7 +583,7 @@ function transformStreamDefaultSourcePullAlgorithm(stream) { module.exports = { TransformStream, TransformStreamDefaultController, - TransferedTransformStream, + TransferredTransformStream, // Exported Brand Checks isTransformStream, diff --git a/lib/internal/webstreams/util.js b/lib/internal/webstreams/util.js index e0876caf81b944..8cf31d1c307ca9 100644 --- a/lib/internal/webstreams/util.js +++ b/lib/internal/webstreams/util.js @@ -121,7 +121,7 @@ function transferArrayBuffer(buffer) { const res = detachArrayBuffer(buffer); if (res === undefined) { throw 
new ERR_OPERATION_FAILED.TypeError( - 'The ArrayBuffer could not be transfered'); + 'The ArrayBuffer could not be transferred'); } return res; } diff --git a/lib/internal/webstreams/writablestream.js b/lib/internal/webstreams/writablestream.js index 793ae9e9ad8fb4..c36ce82fcdcfbf 100644 --- a/lib/internal/webstreams/writablestream.js +++ b/lib/internal/webstreams/writablestream.js @@ -258,7 +258,7 @@ class WritableStream { return { data: { port: this[kState].transfer.port2 }, deserializeInfo: - 'internal/webstreams/writablestream:TransferedWritableStream' + 'internal/webstreams/writablestream:TransferredWritableStream' }; } @@ -286,7 +286,7 @@ ObjectDefineProperties(WritableStream.prototype, { getWriter: { enumerable: true }, }); -function TransferedWritableStream() { +function TransferredWritableStream() { return makeTransferable(ReflectConstruct( function() { this[kType] = 'WritableStream'; @@ -332,7 +332,7 @@ function TransferedWritableStream() { }, [], WritableStream)); } -TransferedWritableStream.prototype[kDeserialize] = () => {}; +TransferredWritableStream.prototype[kDeserialize] = () => {}; class WritableStreamDefaultWriter { [kType] = 'WritableStreamDefaultWriter'; @@ -1278,7 +1278,7 @@ module.exports = { WritableStream, WritableStreamDefaultWriter, WritableStreamDefaultController, - TransferedWritableStream, + TransferredWritableStream, // Exported Brand Checks isWritableStream, From 38ae4077c770c2fd12eff7198a58265db9ef6ef4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tobias=20Nie=C3=9Fen?= Date: Sun, 11 Jul 2021 17:51:31 +0200 Subject: [PATCH 132/133] doc: fix typos in Web Streams API documentation MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR-URL: https://github.com/nodejs/node/pull/39351 Reviewed-By: Michaël Zasso Reviewed-By: Gus Caplan Reviewed-By: Luigi Pinca Reviewed-By: Gireesh Punathil Reviewed-By: James M Snell Reviewed-By: Colin Ihrig --- doc/api/webstreams.md | 10 +++++----- 1 file changed, 5 
insertions(+), 5 deletions(-) diff --git a/doc/api/webstreams.md b/doc/api/webstreams.md index 90667c1c1bb28c..e40850ef8531bb 100644 --- a/doc/api/webstreams.md +++ b/doc/api/webstreams.md @@ -352,7 +352,7 @@ method to acquire the async iterator and set the `preventCancel` option to The {ReadableStream} must not be locked (that is, it must not have an existing active reader). During the async iteration, the {ReadableStream} will be locked. -#### Transfering with `postMessage()` +#### Transferring with `postMessage()` A {ReadableStream} instance can be transferred using a {MessagePort}. @@ -554,7 +554,7 @@ the pooled `Buffer` instances. When a `Buffer`, {TypedArray}, or {DataView} is passed in to `readableStreamBYOBReader.read()`, the view's underlying `ArrayBuffer` is *detached*, invalidating all existing views that may exist on that `ArrayBuffer`. This -can have disasterous consequences for your application. +can have disastrous consequences for your application. #### `readableStreamBYOBReader.releaseLock()` @@ -2569,7 +2569,7 @@ See the POSIX mkdir(2) documentation for more details. diff --git a/doc/api/webstreams.md b/doc/api/webstreams.md index e40850ef8531bb..407230f96812ee 100644 --- a/doc/api/webstreams.md +++ b/doc/api/webstreams.md @@ -97,12 +97,12 @@ const stream = new ReadableStream({ ### Class: `ReadableStream` #### `new ReadableStream([underlyingSource [, strategy]])` @@ -135,7 +135,7 @@ added: REPLACEME #### `readableStream.locked` * Type: {boolean} Set to `true` if there is an active reader for this @@ -147,7 +147,7 @@ stream's data. #### `readableStream.cancel([reason])` * `reason` {any} @@ -156,7 +156,7 @@ added: REPLACEME #### `readableStream.getReader([options])` * `options` {Object} @@ -187,7 +187,7 @@ Causes the `readableStream.locked` to be `true`. 
#### `readableStream.pipeThrough(transform[, options])` * `transform` {Object} @@ -269,7 +269,7 @@ const transformedStream = stream.pipeThrough(transform); #### `readableStream.pipeTo(destination, options)` * `destination` {WritableStream} A {WritableStream} to which this @@ -291,7 +291,7 @@ is active. #### `readableStream.tee()` * Returns: {ReadableStream[]} @@ -304,7 +304,7 @@ Causes the `readableStream.locked` to be `true`. #### `readableStream.values([options])` * `options` {Object} @@ -372,7 +372,7 @@ port2.postMessage(stream, [stream]); ### Class: `ReadableStreamDefaultReader` By default, calling `readableStream.getReader()` with no arguments @@ -383,7 +383,7 @@ JavaScript value. #### `new ReadableStreamDefaultReader(stream)` * `stream` {ReadableStream} @@ -393,7 +393,7 @@ given {ReadableStream}. #### `readableStreamDefaultReader.cancel([reason])` * `reason` {any} @@ -404,7 +404,7 @@ when the underlying stream has been canceled. #### `readableStreamDefaultReader.closed` * Type: {Promise} Fulfilled with `undefined` when the associated @@ -412,7 +412,7 @@ added: REPLACEME #### `readableStreamDefaultReader.read()` * Returns: A promise fulfilled with an object: @@ -425,14 +425,14 @@ available. #### `readableStreamDefaultReader.releaseLock()` Releases this reader's lock on the underlying {ReadableStream}. ### Class: `ReadableStreamBYOBReader` The `ReadableStreamBYOBReader` is an alternative consumer for @@ -504,7 +504,7 @@ console.log(Buffer.from(data).toString()); #### `new ReadableStreamBYOBReader(stream)` * `stream` {ReadableStream} @@ -514,7 +514,7 @@ given {ReadableStream}. #### `readableStreamBYOBReader.cancel([reason])` * `reason` {any} @@ -525,7 +525,7 @@ when the underlying stream has been canceled. 
#### `readableStreamBYOBReader.closed` * Type: {Promise} Fulfilled with `undefined` when the associated @@ -533,7 +533,7 @@ added: REPLACEME #### `readableStreamBYOBReader.read(view)` * `view` {Buffer|TypedArray|DataView} @@ -558,14 +558,14 @@ can have disastrous consequences for your application. #### `readableStreamBYOBReader.releaseLock()` Releases this reader's lock on the underlying {ReadableStream}. ### Class: `ReadableStreamDefaultController` Every {ReadableStream} has a controller that is responsible for @@ -575,14 +575,14 @@ implementation for `ReadableStream`s that are not byte-oriented. #### `readableStreamDefaultController.close()` Closes the {ReadableStream} to which this controller is associated. #### `readableStreamDefaultController.desiredSize` * Type: {number} @@ -592,7 +592,7 @@ queue. #### `readableStreamDefaultController.enqueue(chunk)` * `chunk` {any} @@ -601,7 +601,7 @@ Appends a new chunk of data to the {ReadableStream}'s queue. #### `readableStreamDefaultController.error(error)` * `error` {any} @@ -610,7 +610,7 @@ Signals an error that causes the {ReadableStream} to error and close. ### Class: `ReadableByteStreamController` Every {ReadableStream} has a controller that is responsible for @@ -619,21 +619,21 @@ the internal state and management of the stream's queue. The #### `readableByteStreamController.byobRequest` * Type: {ReadableStreamBYOBRequest} #### `readableByteStreamController.close()` Closes the {ReadableStream} to which this controller is associated. #### `readableByteStreamController.desiredSize` * Type: {number} @@ -643,7 +643,7 @@ queue. #### `readableByteStreamController.enqueue(chunk)` * `chunk`: {Buffer|TypedArray|DataView} @@ -652,7 +652,7 @@ Appends a new chunk of data to the {ReadableStream}'s queue. #### `readableByteStreamController.error(error)` * `error` {any} @@ -661,7 +661,7 @@ Signals an error that causes the {ReadableStream} to error and close. 
### Class: `ReadableStreamBYOBRequest` When using `ReadableByteStreamController` in byte-oriented @@ -676,7 +676,7 @@ been provided. #### `readableStreamBYOBRequest.respond(bytesWritten)` * `bytesWritten` {number} @@ -686,7 +686,7 @@ to `readableStreamBYOBRequest.view`. #### `readableStreamBYOBRequest.respondWithNewView(view)` * `view` {Buffer|TypedArray|DataView} @@ -696,14 +696,14 @@ to a new `Buffer`, `TypedArray`, or `DataView`. #### `readableStreamBYOBRequest.view` * Type: {Buffer|TypedArray|DataView} ### Class: `WritableStream` The `WritableStream` is a destination to which stream data is sent. @@ -724,7 +724,7 @@ await stream.getWriter().write('Hello World'); #### `new WritableStream([underlyingSink[, strategy]])` * `underlyingSink` {Object} @@ -756,7 +756,7 @@ added: REPLACEME #### `writableStream.abort([reason])` * `reason` {any} @@ -767,7 +767,7 @@ canceled with their associated promises rejected. #### `writableStream.close()` * Returns: A promise fulfilled with `undefined`. @@ -776,7 +776,7 @@ Closes the `WritableStream` when no additional writes are expected. #### `writableStream.getWriter()` * Returns: {WritableStreamDefaultWriter} @@ -786,7 +786,7 @@ data into the `WritableStream`. #### `writableStream.locked` * Type: {boolean} @@ -813,12 +813,12 @@ port2.postMessage(stream, [stream]); ### Class: `WritableStreamDefaultWriter` #### `new WritableStreamDefaultWriter(stream)` * `stream` {WritableStream} @@ -828,7 +828,7 @@ Creates a new `WritableStreamDefaultWriter` that is locked to the given #### `writableStreamDefaultWriter.abort([reason])` * `reason` {any} @@ -839,7 +839,7 @@ canceled with their associated promises rejected. #### `writableStreamDefaultWriter.close()` * Returns: A promise fulfilled with `undefined`. @@ -848,7 +848,7 @@ Closes the `WritableStream` when no additional writes are expected. 
#### `writableStreamDefaultWriter.closed` * Type: A promise that is fulfilled with `undefined` when the @@ -857,7 +857,7 @@ added: REPLACEME #### `writableStreamDefaultWriter.desiredSize` * Type: {number} @@ -866,7 +866,7 @@ The amount of data required to fill the {WritableStream}'s queue. #### `writableStreamDefaultWriter.ready` * type: A promise that is fulfilled with `undefined` when the @@ -874,14 +874,14 @@ added: REPLACEME #### `writableStreamDefaultWriter.releaseLock()` Releases this writer's lock on the underlying {ReadableStream}. #### `writableStreamDefaultWriter.write([chunk])` * `chunk`: {any} @@ -891,7 +891,7 @@ Appends a new chunk of data to the {WritableStream}'s queue. ### Class: `WritableStreamDefaultController` The `WritableStreamDefaultController` manage's the {WritableStream}'s @@ -903,7 +903,7 @@ internal state. #### `writableStreamDefaultController.error(error)` * `error` {any} @@ -919,7 +919,7 @@ with currently pending writes canceled. ### Class: `TransformStream` A `TransformStream` consists of a {ReadableStream} and a {WritableStream} that @@ -946,7 +946,7 @@ await Promise.all([ #### `new TransformStream([transformer[, writableStrategy[, readableStrategy]]])` * `transformer` {Object} @@ -986,14 +986,14 @@ added: REPLACEME #### `transformStream.readable` * Type: {ReadableStream} #### `transformStream.writable` * Type: {WritableStream} @@ -1017,7 +1017,7 @@ port2.postMessage(stream, [stream]); ### Class: `TransformStreamDefaultController` The `TransformStreamDefaultController` manages the internal state @@ -1025,7 +1025,7 @@ of the `TransformStream`. #### `transformStreamDefaultController.desiredSize` * Type: {number} @@ -1034,7 +1034,7 @@ The amount of data required to fill the readable side's queue. #### `transformStreamDefaultController.enqueue([chunk])` * `chunk` {any} @@ -1043,7 +1043,7 @@ Appends a chunk of data to the readable side's queue. 
#### `transformStreamDefaultController.error([reason])` * `reason` {any} @@ -1054,7 +1054,7 @@ closed. #### `transformStreamDefaultController.terminate()` Closes the readable side of the transport and causes the writable side @@ -1062,12 +1062,12 @@ to be abruptly closed with an error. ### Class: `ByteLengthQueuingStrategy` #### `new ByteLengthQueuingStrategy(options)` * `options` {Object} @@ -1075,14 +1075,14 @@ added: REPLACEME #### `byteLengthQueuingStrategy.highWaterMark` * Type: {number} #### `byteLengthQueuingStrategy.size` * Type: {Function} @@ -1091,12 +1091,12 @@ added: REPLACEME ### Class: `CountQueuingStrategy` #### `new CountQueuingStrategy(options)` * `options` {Object} @@ -1104,14 +1104,14 @@ added: REPLACEME #### `countQueuingStrategy.highWaterMark` * Type: {number} #### `countQueuingStrategy.size` * Type: {Function} diff --git a/doc/changelogs/CHANGELOG_V16.md b/doc/changelogs/CHANGELOG_V16.md index 1195cb9e726c9e..6ea9b378110ba8 100644 --- a/doc/changelogs/CHANGELOG_V16.md +++ b/doc/changelogs/CHANGELOG_V16.md @@ -10,6 +10,7 @@ +16.5.0
    16.4.2
    16.4.1
    16.4.0
    @@ -39,6 +40,171 @@ * [io.js](CHANGELOG_IOJS.md) * [Archive](CHANGELOG_ARCHIVE.md) + +## 2021-07-14, Version 16.5.0 (Current), @targos + +### Notable Changes + +#### Experimental Web Streams API + +Node.js now exposes an experimental implementation of the +[Web Streams API](https://developer.mozilla.org/en-US/docs/Web/API/Streams_API). + +While it is experimental, the API is not exposed on the global object and is only +accessible using the new `stream/web` core module: + +```mjs +import { ReadableStream, WritableStream } from 'stream/web'; +// Or from 'node:stream/web' +``` + +Importing the module will emit a single experimental warning per process. + +The raw API is implemented and we are now working on its integration with +various existing core APIs. + +Contributed by James M Snell - [#39062](https://github.com/nodejs/node/pull/39062) + +#### Other notable changes + +* [[`83f3b959f9`](https://github.com/nodejs/node/commit/83f3b959f9)] - **(SEMVER-MINOR)** **fs**: allow empty string for temp directory prefix (Voltrex) [#39028](https://github.com/nodejs/node/pull/39028) +* [[`c04fd2bb97`](https://github.com/nodejs/node/commit/c04fd2bb97)] - **deps**: upgrade npm to 7.19.1 (npm team) [#39225](https://github.com/nodejs/node/pull/39225) + +### Commits + +* [[`aafa08d7b9`](https://github.com/nodejs/node/commit/aafa08d7b9)] - **bootstrap**: load perf\_hooks eagerly during bootstrap (Joyee Cheung) [#38971](https://github.com/nodejs/node/pull/38971) +* [[`6e46eb186c`](https://github.com/nodejs/node/commit/6e46eb186c)] - **bootstrap**: support perf hooks in snapshot (Joyee Cheung) [#38971](https://github.com/nodejs/node/pull/38971) +* [[`10681828ac`](https://github.com/nodejs/node/commit/10681828ac)] - **build**: update gcovr for gcc 8 compatibility (Richard Lau) [#39326](https://github.com/nodejs/node/pull/39326) +* [[`8381132f76`](https://github.com/nodejs/node/commit/8381132f76)] - **build**: add riscv into host\_arch\_cc (Lu Yahan) 
[#39004](https://github.com/nodejs/node/pull/39004) +* [[`a7ba21864d`](https://github.com/nodejs/node/commit/a7ba21864d)] - **build**: restore libplatform headers in distribution (Jeroen Ooms) [#39288](https://github.com/nodejs/node/pull/39288) +* [[`41161eabf2`](https://github.com/nodejs/node/commit/41161eabf2)] - **build**: remove unused comment in Makefile (LitoMore) [#39171](https://github.com/nodejs/node/pull/39171) +* [[`f6a1092471`](https://github.com/nodejs/node/commit/f6a1092471)] - **build**: allow to build riscv64 using Makefile (Makoto Kato) [#39048](https://github.com/nodejs/node/pull/39048) +* [[`a7cd40ed8d`](https://github.com/nodejs/node/commit/a7cd40ed8d)] - **build**: uvwasi honours node\_shared\_libuv (Jérémy Lal) [#39260](https://github.com/nodejs/node/pull/39260) +* [[`3ed04994b7`](https://github.com/nodejs/node/commit/3ed04994b7)] - **build**: shorten path used in tarball build workflow (Richard Lau) [#39192](https://github.com/nodejs/node/pull/39192) +* [[`65b56b3774`](https://github.com/nodejs/node/commit/65b56b3774)] - **build**: fix building with external builtins (Momtchil Momtchev) [#39091](https://github.com/nodejs/node/pull/39091) +* [[`412b1012d2`](https://github.com/nodejs/node/commit/412b1012d2)] - **build**: pass directory instead of list of files to js2c.py (Joyee Cheung) [#39069](https://github.com/nodejs/node/pull/39069) +* [[`171ca6bb3c`](https://github.com/nodejs/node/commit/171ca6bb3c)] - **build**: don't pass `--mode` argument to V8 test-runner (Richard Lau) [#39055](https://github.com/nodejs/node/pull/39055) +* [[`cf8536ea3f`](https://github.com/nodejs/node/commit/cf8536ea3f)] - **build**: fix commit linter on unrebased PRs (Mary Marchini) [#39121](https://github.com/nodejs/node/pull/39121) +* [[`cf0533b8b2`](https://github.com/nodejs/node/commit/cf0533b8b2)] - **build**: use Actions to validate commit message (Mary Marchini) [#32417](https://github.com/nodejs/node/pull/32417) +* 
[[`4202274851`](https://github.com/nodejs/node/commit/4202274851)] - **crypto**: move OPENSSL\_IS\_BORINGSSL guard (Shelley Vohr) [#39136](https://github.com/nodejs/node/pull/39136) +* [[`89f5a73ba5`](https://github.com/nodejs/node/commit/89f5a73ba5)] - **crypto**: use compatible ecdh function (Shelley Vohr) [#39054](https://github.com/nodejs/node/pull/39054) +* [[`30e878b603`](https://github.com/nodejs/node/commit/30e878b603)] - **crypto**: add OPENSSL\_IS\_BORINGSSL guard (Shelley Vohr) [#39138](https://github.com/nodejs/node/pull/39138) +* [[`630266cba2`](https://github.com/nodejs/node/commit/630266cba2)] - **debugger**: indicate server is ending (Rich Trott) [#39334](https://github.com/nodejs/node/pull/39334) +* [[`48d9680f84`](https://github.com/nodejs/node/commit/48d9680f84)] - **debugger**: remove final lint exceptions in inspect\_repl.js (Rich Trott) [#39078](https://github.com/nodejs/node/pull/39078) +* [[`4507714f9d`](https://github.com/nodejs/node/commit/4507714f9d)] - **deps**: V8: backport 5c76da8ddcf8 (Michaël Zasso) [#39337](https://github.com/nodejs/node/pull/39337) +* [[`0e64bd0dd6`](https://github.com/nodejs/node/commit/0e64bd0dd6)] - **deps**: V8: cherry-pick 359d44df4cdd (Michaël Zasso) [#39337](https://github.com/nodejs/node/pull/39337) +* [[`142ce6838b`](https://github.com/nodejs/node/commit/142ce6838b)] - **deps**: V8: cherry-pick 3805a698f7b6 (Michaël Zasso) [#39337](https://github.com/nodejs/node/pull/39337) +* [[`2657c305cb`](https://github.com/nodejs/node/commit/2657c305cb)] - **deps**: V8: cherry-pick 56fe020eec0c (Michaël Zasso) [#39337](https://github.com/nodejs/node/pull/39337) +* [[`5c5a93e533`](https://github.com/nodejs/node/commit/5c5a93e533)] - **deps**: V8: cherry-pick 2b77ca200c56 (Michaël Zasso) [#39337](https://github.com/nodejs/node/pull/39337) +* [[`cf49ebb052`](https://github.com/nodejs/node/commit/cf49ebb052)] - **deps**: V8: cherry-pick 53784bdb8f01 (Michaël Zasso) [#39337](https://github.com/nodejs/node/pull/39337) +* 
[[`3d351b29c1`](https://github.com/nodejs/node/commit/3d351b29c1)] - **deps**: V8: cherry-pick cb4faa902e9f (Michaël Zasso) [#39337](https://github.com/nodejs/node/pull/39337) +* [[`165130a3e0`](https://github.com/nodejs/node/commit/165130a3e0)] - **deps**: patch V8 to 9.1.269.38 (Michaël Zasso) [#39196](https://github.com/nodejs/node/pull/39196) +* [[`c04fd2bb97`](https://github.com/nodejs/node/commit/c04fd2bb97)] - **deps**: upgrade npm to 7.19.1 (npm team) [#39225](https://github.com/nodejs/node/pull/39225) +* [[`bf4c50f9d9`](https://github.com/nodejs/node/commit/bf4c50f9d9)] - **deps**: upgrade npm to 7.19.0 (npm team) [#39148](https://github.com/nodejs/node/pull/39148) +* [[`8630b39376`](https://github.com/nodejs/node/commit/8630b39376)] - **deps**: update Acorn to v8.4.1 (Michaël Zasso) [#39166](https://github.com/nodejs/node/pull/39166) +* [[`38ae4077c7`](https://github.com/nodejs/node/commit/38ae4077c7)] - **doc**: fix typos in Web Streams API documentation (Tobias Nießen) [#39351](https://github.com/nodejs/node/pull/39351) +* [[`fb6616ecbb`](https://github.com/nodejs/node/commit/fb6616ecbb)] - **doc**: add text about moving long commit lists out of PR description (Danielle Adams) [#39186](https://github.com/nodejs/node/pull/39186) +* [[`29c9cc8f03`](https://github.com/nodejs/node/commit/29c9cc8f03)] - **doc**: do not use & for "and" in text (Rich Trott) [#39345](https://github.com/nodejs/node/pull/39345) +* [[`0b3b2695bc`](https://github.com/nodejs/node/commit/0b3b2695bc)] - **doc**: do not use tilde for "about" or "approximately" (Rich Trott) [#39344](https://github.com/nodejs/node/pull/39344) +* [[`64a185e595`](https://github.com/nodejs/node/commit/64a185e595)] - **doc**: use consistent abbreviation formatting (Rich Trott) [#39343](https://github.com/nodejs/node/pull/39343) +* [[`2573bf5116`](https://github.com/nodejs/node/commit/2573bf5116)] - **doc**: update AUTHORS (Rich Trott) [#39277](https://github.com/nodejs/node/pull/39277) +* 
[[`63b6084724`](https://github.com/nodejs/node/commit/63b6084724)] - **doc**: put information about the past in details tags (Rich Trott) [#39321](https://github.com/nodejs/node/pull/39321) +* [[`e26635085a`](https://github.com/nodejs/node/commit/e26635085a)] - **doc**: replace outdated `util.promisify` timer examples with references (foxxyz) [#39164](https://github.com/nodejs/node/pull/39164) +* [[`d101a85e36`](https://github.com/nodejs/node/commit/d101a85e36)] - **doc**: move AndreasMadsen to emeritus (Rich Trott) [#39315](https://github.com/nodejs/node/pull/39315) +* [[`2d552a32d6`](https://github.com/nodejs/node/commit/2d552a32d6)] - **doc**: move ofrobots to collaborator emeritus (Rich Trott) [#39307](https://github.com/nodejs/node/pull/39307) +* [[`131d676f64`](https://github.com/nodejs/node/commit/131d676f64)] - **doc**: simplify CRAN mirror text in benchmark guide (Rich Trott) [#39287](https://github.com/nodejs/node/pull/39287) +* [[`c92b80e631`](https://github.com/nodejs/node/commit/c92b80e631)] - **doc**: use "repository" instead of "repo" in onboarding.md (Rich Trott) [#39286](https://github.com/nodejs/node/pull/39286) +* [[`81df9b1e92`](https://github.com/nodejs/node/commit/81df9b1e92)] - **doc**: update collaborator email address (Rich Trott) [#39263](https://github.com/nodejs/node/pull/39263) +* [[`b8860f35c9`](https://github.com/nodejs/node/commit/b8860f35c9)] - **doc**: remove GitHub mark (Rich Trott) [#39251](https://github.com/nodejs/node/pull/39251) +* [[`f06ebf1775`](https://github.com/nodejs/node/commit/f06ebf1775)] - **doc**: remove emailing the TSC from offboarding doc (Rich Trott) [#39280](https://github.com/nodejs/node/pull/39280) +* [[`175a6569f4`](https://github.com/nodejs/node/commit/175a6569f4)] - **doc**: add annotation to writeFile `data` as `Object` (Jacob) [#39167](https://github.com/nodejs/node/pull/39167) +* [[`4d53c63c22`](https://github.com/nodejs/node/commit/4d53c63c22)] - **doc**: fix boldface punctuation for full sentences 
(Rich Trott) [#39278](https://github.com/nodejs/node/pull/39278) +* [[`146f733f43`](https://github.com/nodejs/node/commit/146f733f43)] - **doc**: fix constants usage in fs.access example (Cyrille Bourgois) [#39289](https://github.com/nodejs/node/pull/39289) +* [[`eacee0ab17`](https://github.com/nodejs/node/commit/eacee0ab17)] - **doc**: use "repository" in guides versus repo (Michael Dawson) [#39198](https://github.com/nodejs/node/pull/39198) +* [[`04bcfcfff1`](https://github.com/nodejs/node/commit/04bcfcfff1)] - **doc**: update Node-api version matrix (Michael Dawson) [#39197](https://github.com/nodejs/node/pull/39197) +* [[`4dd6ab389a`](https://github.com/nodejs/node/commit/4dd6ab389a)] - **doc**: remove onboarding-extras (Rich Trott) [#39252](https://github.com/nodejs/node/pull/39252) +* [[`a01dacfdcd`](https://github.com/nodejs/node/commit/a01dacfdcd)] - **doc**: move Sam Ruby to emeritus (Rich Trott) [#39264](https://github.com/nodejs/node/pull/39264) +* [[`2bb3713b74`](https://github.com/nodejs/node/commit/2bb3713b74)] - **doc**: update AUTHORS file (Rich Trott) [#39250](https://github.com/nodejs/node/pull/39250) +* [[`2227c1368f`](https://github.com/nodejs/node/commit/2227c1368f)] - **doc**: fix color contrast for anchor marks in dark mode (Rich Trott) [#39168](https://github.com/nodejs/node/pull/39168) +* [[`f8cdaad9d4`](https://github.com/nodejs/node/commit/f8cdaad9d4)] - **doc**: rename datatypes to data types (FrankEntriken) [#39209](https://github.com/nodejs/node/pull/39209) +* [[`250024eaec`](https://github.com/nodejs/node/commit/250024eaec)] - **doc**: normalize CSS variable names and indentation (Rich Trott) [#39199](https://github.com/nodejs/node/pull/39199) +* [[`db74a35348`](https://github.com/nodejs/node/commit/db74a35348)] - **doc**: remove unnecessary module format comments (Rich Trott) [#39219](https://github.com/nodejs/node/pull/39219) +* [[`24a1f7ec84`](https://github.com/nodejs/node/commit/24a1f7ec84)] - **doc**: use more consistent 
formatting for deprecations (Rich Trott) [#39218](https://github.com/nodejs/node/pull/39218) +* [[`24c0d7d872`](https://github.com/nodejs/node/commit/24c0d7d872)] - **doc**: update AUTHORS (Rich Trott) [#39217](https://github.com/nodejs/node/pull/39217) +* [[`3e5ed72b0a`](https://github.com/nodejs/node/commit/3e5ed72b0a)] - **doc**: use "pull request" instead of "PR" in packages.md (Rich Trott) [#39213](https://github.com/nodejs/node/pull/39213) +* [[`ddc24b2105`](https://github.com/nodejs/node/commit/ddc24b2105)] - **doc**: move v8.stopCoverage() to expected location in doc (Rich Trott) [#39212](https://github.com/nodejs/node/pull/39212) +* [[`68c334c8c9`](https://github.com/nodejs/node/commit/68c334c8c9)] - **doc**: move vm.measureMemory() to expected location in doc (Rich Trott) [#39211](https://github.com/nodejs/node/pull/39211) +* [[`81d52d7c79`](https://github.com/nodejs/node/commit/81d52d7c79)] - **doc**: fix CHANGELOG.md formatting (Richard Lau) [#39223](https://github.com/nodejs/node/pull/39223) +* [[`9c3a5fd53e`](https://github.com/nodejs/node/commit/9c3a5fd53e)] - **doc**: add cc oss-security@lists.openwall.com (Daniel Bevenius) [#39191](https://github.com/nodejs/node/pull/39191) +* [[`07ba2875ae`](https://github.com/nodejs/node/commit/07ba2875ae)] - **doc**: remove instructions for unsupported Node.js versions (Rich Trott) [#39185](https://github.com/nodejs/node/pull/39185) +* [[`482851f647`](https://github.com/nodejs/node/commit/482851f647)] - **doc**: remove obsolete cc recommendations (Rich Trott) [#39181](https://github.com/nodejs/node/pull/39181) +* [[`8311b29083`](https://github.com/nodejs/node/commit/8311b29083)] - **doc**: use "repository" in maintaining-V8 doc (Rich Trott) [#39179](https://github.com/nodejs/node/pull/39179) +* [[`952580e1bf`](https://github.com/nodejs/node/commit/952580e1bf)] - **doc**: fix broken link in errors.md (Rich Trott) [#39200](https://github.com/nodejs/node/pull/39200) +* 
[[`af1e1dba36`](https://github.com/nodejs/node/commit/af1e1dba36)] - **doc**: correct JavaScript primitive value names in n-api.md (legendecas) [#39129](https://github.com/nodejs/node/pull/39129) +* [[`00728d1301`](https://github.com/nodejs/node/commit/00728d1301)] - **doc**: apply logical ordering to CSS variables (Rich Trott) [#39169](https://github.com/nodejs/node/pull/39169) +* [[`aec2744e14`](https://github.com/nodejs/node/commit/aec2744e14)] - **doc**: remove file name from self-reference links (Antoine du Hamel) [#39165](https://github.com/nodejs/node/pull/39165) +* [[`74bb915178`](https://github.com/nodejs/node/commit/74bb915178)] - **doc**: use repository instead of repo (Rich Trott) [#39157](https://github.com/nodejs/node/pull/39157) +* [[`a669a191a1`](https://github.com/nodejs/node/commit/a669a191a1)] - **doc**: use ASCII order for md refs (Antoine du Hamel) [#39170](https://github.com/nodejs/node/pull/39170) +* [[`21e8720155`](https://github.com/nodejs/node/commit/21e8720155)] - **doc**: fix `EventTarget.dispatchEvent` docs (Rohan Sharma) [#39127](https://github.com/nodejs/node/pull/39127) +* [[`90ec7660bc`](https://github.com/nodejs/node/commit/90ec7660bc)] - **doc**: update AUTHORS file (Rich Trott) [#39082](https://github.com/nodejs/node/pull/39082) +* [[`81cebec5cc`](https://github.com/nodejs/node/commit/81cebec5cc)] - **doc**: esm examples /w imports for process, Buffer (Guy Bedford) [#39043](https://github.com/nodejs/node/pull/39043) +* [[`c1588887a6`](https://github.com/nodejs/node/commit/c1588887a6)] - **doc**: fix napi\_default\_property name (Davidson Francis) [#39104](https://github.com/nodejs/node/pull/39104) +* [[`a440f6c69c`](https://github.com/nodejs/node/commit/a440f6c69c)] - **doc**: fix dead links in packages.md (Michaël Zasso) [#39113](https://github.com/nodejs/node/pull/39113) +* [[`33cad271c5`](https://github.com/nodejs/node/commit/33cad271c5)] - **errors**: remove eager stack generation for node errors (Gus Caplan) 
[#39182](https://github.com/nodejs/node/pull/39182) +* [[`ac05a0a8a3`](https://github.com/nodejs/node/commit/ac05a0a8a3)] - **errors**: don't throw TypeError on missing export (Benjamin Coe) [#39017](https://github.com/nodejs/node/pull/39017) +* [[`83f3b959f9`](https://github.com/nodejs/node/commit/83f3b959f9)] - **(SEMVER-MINOR)** **fs**: allow empty string for temp directory prefix (Voltrex) [#39028](https://github.com/nodejs/node/pull/39028) +* [[`ac7184d8c7`](https://github.com/nodejs/node/commit/ac7184d8c7)] - **http**: clean up HttpParser correctly (Tobias Koppers) [#39292](https://github.com/nodejs/node/pull/39292) +* [[`35331cbd13`](https://github.com/nodejs/node/commit/35331cbd13)] - **http,https**: align server option of https with http (Qingyu Deng) [#38992](https://github.com/nodejs/node/pull/38992) +* [[`29194d4f88`](https://github.com/nodejs/node/commit/29194d4f88)] - **inspector**: move inspector async hooks to environment (Joyee Cheung) [#39112](https://github.com/nodejs/node/pull/39112) +* [[`ecf627a9af`](https://github.com/nodejs/node/commit/ecf627a9af)] - **lib**: rename TransferedReadableStream etc (Tobias Nießen) [#39352](https://github.com/nodejs/node/pull/39352) +* [[`0e55cb72df`](https://github.com/nodejs/node/commit/0e55cb72df)] - **lib**: make lazyDOMException more common (Khaidi Chu) [#39105](https://github.com/nodejs/node/pull/39105) +* [[`cfd96aa8f9`](https://github.com/nodejs/node/commit/cfd96aa8f9)] - **meta**: fix tls code owners (Robert Nagy) [#39355](https://github.com/nodejs/node/pull/39355) +* [[`e5c2d80560`](https://github.com/nodejs/node/commit/e5c2d80560)] - **meta**: use form schema for bug report template (Michaël Zasso) [#39194](https://github.com/nodejs/node/pull/39194) +* [[`bd472daf0c`](https://github.com/nodejs/node/commit/bd472daf0c)] - **meta**: add @nodejs/actions as CODEOWNERS (Mary Marchini) [#39119](https://github.com/nodejs/node/pull/39119) +* [[`63f87027e3`](https://github.com/nodejs/node/commit/63f87027e3)] - 
**node-api**: cctest on v8impl::Reference (legendecas) [#38970](https://github.com/nodejs/node/pull/38970) +* [[`7ea98fbccd`](https://github.com/nodejs/node/commit/7ea98fbccd)] - **perf_hooks**: refactor perf\_hooks for snapshot building (Joyee Cheung) [#38971](https://github.com/nodejs/node/pull/38971) +* [[`20cc8ec2af`](https://github.com/nodejs/node/commit/20cc8ec2af)] - **readline**: allow completer to rewrite existing input (Anna Henningsen) [#39178](https://github.com/nodejs/node/pull/39178) +* [[`b168ec2a2a`](https://github.com/nodejs/node/commit/b168ec2a2a)] - **repl**: processTopLevelAwait fallback error handling (ejose19) [#39290](https://github.com/nodejs/node/pull/39290) +* [[`a101fe68ad`](https://github.com/nodejs/node/commit/a101fe68ad)] - **repl**: correctly hoist top level await declarations (ejose19) [#39265](https://github.com/nodejs/node/pull/39265) +* [[`d441d91450`](https://github.com/nodejs/node/commit/d441d91450)] - **repl**: ensure correct syntax err for await parsing (Guy Bedford) [#39154](https://github.com/nodejs/node/pull/39154) +* [[`9184259a54`](https://github.com/nodejs/node/commit/9184259a54)] - **src**: add JSDoc typings for v8 (Voltrex) [#38944](https://github.com/nodejs/node/pull/38944) +* [[`66553feeba`](https://github.com/nodejs/node/commit/66553feeba)] - **src**: compare IPv4 addresses in host byte order (Colin Ihrig) [#39096](https://github.com/nodejs/node/pull/39096) +* [[`ea8d83bf59`](https://github.com/nodejs/node/commit/ea8d83bf59)] - **src,crypto**: fix 0-length output crash in webcrypto (Khaidi Chu) [#38913](https://github.com/nodejs/node/pull/38913) +* [[`683c995001`](https://github.com/nodejs/node/commit/683c995001)] - **src,zlib**: tighten up Z\_\*\_WINDOWBITS macros (Khaidi Chu) [#39115](https://github.com/nodejs/node/pull/39115) +* [[`cb32f69e00`](https://github.com/nodejs/node/commit/cb32f69e00)] - **stream**: cleanup async handling (Robert Nagy) [#39329](https://github.com/nodejs/node/pull/39329) +* 
[[`1fc6382942`](https://github.com/nodejs/node/commit/1fc6382942)] - **stream**: don't emit prefinish after error or close (Robert Nagy) [#39332](https://github.com/nodejs/node/pull/39332) +* [[`35b6669e13`](https://github.com/nodejs/node/commit/35b6669e13)] - **stream**: use finished for pump (Robert Nagy) [#39203](https://github.com/nodejs/node/pull/39203) +* [[`9af62a1357`](https://github.com/nodejs/node/commit/9af62a1357)] - **(SEMVER-MINOR)** **stream**: implement WHATWG streams (James M Snell) [#39062](https://github.com/nodejs/node/pull/39062) +* [[`0bb980aeaf`](https://github.com/nodejs/node/commit/0bb980aeaf)] - **test**: remove eslint-disable comment from fixture file (Rich Trott) [#39320](https://github.com/nodejs/node/pull/39320) +* [[`21f77031fb`](https://github.com/nodejs/node/commit/21f77031fb)] - **test**: move debugger test case to parallel (Rich Trott) [#39300](https://github.com/nodejs/node/pull/39300) +* [[`0ec93a1fc1`](https://github.com/nodejs/node/commit/0ec93a1fc1)] - **test**: use common.PORT instead of hardcoded port number (Rich Trott) [#39298](https://github.com/nodejs/node/pull/39298) +* [[`11a8b81caf`](https://github.com/nodejs/node/commit/11a8b81caf)] - **test**: remove debugger workaround for AIX (Rich Trott) [#39296](https://github.com/nodejs/node/pull/39296) +* [[`8e77aa23f1`](https://github.com/nodejs/node/commit/8e77aa23f1)] - **test**: add test for debugger restart message issue (Rich Trott) [#39273](https://github.com/nodejs/node/pull/39273) +* [[`13755599e1`](https://github.com/nodejs/node/commit/13755599e1)] - **test**: remove workaround code in debugger test (Rich Trott) [#39238](https://github.com/nodejs/node/pull/39238) +* [[`1f31e3c774`](https://github.com/nodejs/node/commit/1f31e3c774)] - **test**: remove checks for armv6 (Rich Trott) [#39162](https://github.com/nodejs/node/pull/39162) +* [[`d486d0117c`](https://github.com/nodejs/node/commit/d486d0117c)] - **test**: move test-debugger-address to parallel (Rich Trott) 
[#39236](https://github.com/nodejs/node/pull/39236) +* [[`cdc7a19f48`](https://github.com/nodejs/node/commit/cdc7a19f48)] - **test**: remove common.enoughTestCpu (Rich Trott) [#39161](https://github.com/nodejs/node/pull/39161) +* [[`cc32365f56`](https://github.com/nodejs/node/commit/cc32365f56)] - **(SEMVER-MINOR)** **test**: add WPT streams tests (James M Snell) [#39062](https://github.com/nodejs/node/pull/39062) +* [[`fff21a4afb`](https://github.com/nodejs/node/commit/fff21a4afb)] - **test**: replace "inspector-cli" with "debugger" (Rich Trott) [#39156](https://github.com/nodejs/node/pull/39156) +* [[`df17c62818`](https://github.com/nodejs/node/commit/df17c62818)] - **test**: use localhost test instead of connecting to remote (Adam Majer) [#39011](https://github.com/nodejs/node/pull/39011) +* [[`dfe99d2aac`](https://github.com/nodejs/node/commit/dfe99d2aac)] - **tls**: move legacy code into own file (Robert Nagy) [#39333](https://github.com/nodejs/node/pull/39333) +* [[`f338fddbb0`](https://github.com/nodejs/node/commit/f338fddbb0)] - **tools**: add GitHub Action to run find-inactive-collaborators.mjs (Rich Trott) [#39335](https://github.com/nodejs/node/pull/39335) +* [[`b3a0dd1e4a`](https://github.com/nodejs/node/commit/b3a0dd1e4a)] - **tools**: pass bot token to node-pr-labeler (Michaël Zasso) [#39271](https://github.com/nodejs/node/pull/39271) +* [[`b56a3d9009`](https://github.com/nodejs/node/commit/b56a3d9009)] - **tools**: update gyp-next to v0.9.3 (Jiawen Geng) [#39291](https://github.com/nodejs/node/pull/39291) +* [[`3cd9f5e298`](https://github.com/nodejs/node/commit/3cd9f5e298)] - **tools**: add find-inactive-collaborators.js (Rich Trott) [#39262](https://github.com/nodejs/node/pull/39262) +* [[`0673ede3ad`](https://github.com/nodejs/node/commit/0673ede3ad)] - **tools**: take ownership of deps/v8/tools/node (Michaël Zasso) [#39222](https://github.com/nodejs/node/pull/39222) +* [[`cb8c6ffbce`](https://github.com/nodejs/node/commit/cb8c6ffbce)] - **tools**: 
update ESLint to 7.30.0 (Colin Ihrig) [#39242](https://github.com/nodejs/node/pull/39242) +* [[`d5113f9e34`](https://github.com/nodejs/node/commit/d5113f9e34)] - **tools**: remove armv6 from test tools (Rich Trott) [#39162](https://github.com/nodejs/node/pull/39162) +* [[`802d9c4488`](https://github.com/nodejs/node/commit/802d9c4488)] - **tools**: update path-parse to 1.0.7 (Rich Trott) [#39232](https://github.com/nodejs/node/pull/39232) +* [[`ab9ccd014c`](https://github.com/nodejs/node/commit/ab9ccd014c)] - **tools**: remove unused `lint-pr-commit-message.sh` (Richard Lau) [#39120](https://github.com/nodejs/node/pull/39120) +* [[`6200f3b35f`](https://github.com/nodejs/node/commit/6200f3b35f)] - **tools**: update @babel/eslint-parser to 7.14.7 (Rich Trott) [#39160](https://github.com/nodejs/node/pull/39160) +* [[`dfe5d1139c`](https://github.com/nodejs/node/commit/dfe5d1139c)] - **tools**: update remark-preset-lint-node to 2.4.1 (Rich Trott) [#39201](https://github.com/nodejs/node/pull/39201) +* [[`4715105581`](https://github.com/nodejs/node/commit/4715105581)] - **tools**: upgrade `highlight.js` to version 11.0.1 (Antoine du Hamel) [#39032](https://github.com/nodejs/node/pull/39032) +* [[`2481ddd08d`](https://github.com/nodejs/node/commit/2481ddd08d)] - **tools,doc**: fix error message for unrecognized type (Antoine du Hamel) [#39221](https://github.com/nodejs/node/pull/39221) +* [[`adb812c042`](https://github.com/nodejs/node/commit/adb812c042)] - **typings**: add a few JSDoc typings for the net lib module (nerdthatnoonelikes) [#38953](https://github.com/nodejs/node/pull/38953) +* [[`29673b8ac8`](https://github.com/nodejs/node/commit/29673b8ac8)] - **typings**: add JSDoc typings for timers (Voltrex) [#38834](https://github.com/nodejs/node/pull/38834) +* [[`fe1c81f247`](https://github.com/nodejs/node/commit/fe1c81f247)] - **wasi**: use missing validator (Voltrex) [#39070](https://github.com/nodejs/node/pull/39070) + ## 2021-07-05, Version 16.4.2 (Current), 
@BethGriggs @@ -80,7 +246,7 @@ Vulnerabilities fixed: * **async_hooks**: * stabilize part of AsyncLocalStorage (Vladimir de Turckheim) [#37675](https://github.com/nodejs/node/pull/37675) * **deps**: - * upgrade npm to 7.18.1 (npm-robot) [#39065](https://github.com/nodejs/node/pull/39065) + * upgrade npm to 7.18.1 (npm team) [#39065](https://github.com/nodejs/node/pull/39065) * update V8 to 9.1.269.36 (Michaël Zasso) [#38273](https://github.com/nodejs/node/pull/38273) * **dns**: * allow `--dns-result-order` to change default dns verbatim (Ouyang Yadong) [#38099](https://github.com/nodejs/node/pull/38099) @@ -114,8 +280,8 @@ Vulnerabilities fixed: * [[`dc9218136b`](https://github.com/nodejs/node/commit/dc9218136b)] - **debugger**: use ERR\_DEBUGGER\_ERROR in debugger client (Rich Trott) [#39024](https://github.com/nodejs/node/pull/39024) * [[`711916a271`](https://github.com/nodejs/node/commit/711916a271)] - **debugger**: remove unnecessary boilerplate copyright comment (Rich Trott) [#38952](https://github.com/nodejs/node/pull/38952) * [[`0f65e41442`](https://github.com/nodejs/node/commit/0f65e41442)] - **debugger**: reduce scope of eslint disable comment (Rich Trott) [#38946](https://github.com/nodejs/node/pull/38946) -* [[`1fa724ec5a`](https://github.com/nodejs/node/commit/1fa724ec5a)] - **deps**: upgrade npm to 7.18.1 (npm-robot) [#39065](https://github.com/nodejs/node/pull/39065) -* [[`c6aa68598d`](https://github.com/nodejs/node/commit/c6aa68598d)] - **deps**: upgrade npm to 7.17.0 (npm-robot) [#38999](https://github.com/nodejs/node/pull/38999) +* [[`1fa724ec5a`](https://github.com/nodejs/node/commit/1fa724ec5a)] - **deps**: upgrade npm to 7.18.1 (npm team) [#39065](https://github.com/nodejs/node/pull/39065) +* [[`c6aa68598d`](https://github.com/nodejs/node/commit/c6aa68598d)] - **deps**: upgrade npm to 7.17.0 (npm team) [#38999](https://github.com/nodejs/node/pull/38999) * [[`864fe9910b`](https://github.com/nodejs/node/commit/864fe9910b)] - **deps**: make V8 9.1 
abi-compatible with 9.0 (Michaël Zasso) [#38991](https://github.com/nodejs/node/pull/38991) * [[`c93f3573eb`](https://github.com/nodejs/node/commit/c93f3573eb)] - **deps**: V8: cherry-pick fa4cb172cde2 (Michaël Zasso) [#38273](https://github.com/nodejs/node/pull/38273) * [[`3c6c28b0a1`](https://github.com/nodejs/node/commit/3c6c28b0a1)] - **deps**: V8: cherry-pick 4c074516397b (Michaël Zasso) [#38273](https://github.com/nodejs/node/pull/38273) @@ -131,7 +297,7 @@ Vulnerabilities fixed: * [[`4ef37c83a9`](https://github.com/nodejs/node/commit/4ef37c83a9)] - **deps**: V8: patch register-arm64.h (Refael Ackermann) [#32116](https://github.com/nodejs/node/pull/32116) * [[`7c61c6ee25`](https://github.com/nodejs/node/commit/7c61c6ee25)] - **deps**: V8: un-cherry-pick bd019bd (Refael Ackermann) [#32116](https://github.com/nodejs/node/pull/32116) * [[`e82ef4148e`](https://github.com/nodejs/node/commit/e82ef4148e)] - **(SEMVER-MINOR)** **deps**: update V8 to 9.1.269.36 (Michaël Zasso) [#38273](https://github.com/nodejs/node/pull/38273) -* [[`70af146745`](https://github.com/nodejs/node/commit/70af146745)] - **deps**: upgrade npm to 7.16.0 (npm-robot) [#38920](https://github.com/nodejs/node/pull/38920) +* [[`70af146745`](https://github.com/nodejs/node/commit/70af146745)] - **deps**: upgrade npm to 7.16.0 (npm team) [#38920](https://github.com/nodejs/node/pull/38920) * [[`a71df7630e`](https://github.com/nodejs/node/commit/a71df7630e)] - **(SEMVER-MINOR)** **dns**: allow `--dns-result-order` to change default dns verbatim (Ouyang Yadong) [#38099](https://github.com/nodejs/node/pull/38099) * [[`dce256b210`](https://github.com/nodejs/node/commit/dce256b210)] - **doc**: remove references to deleted freenode channels (devsnek) [#39047](https://github.com/nodejs/node/pull/39047) * [[`1afff98805`](https://github.com/nodejs/node/commit/1afff98805)] - **doc**: fix typos (bl-ue) [#39049](https://github.com/nodejs/node/pull/39049) @@ -230,7 +396,7 @@ Vulnerabilities fixed: * 
[[`eb7c932a6d`](https://github.com/nodejs/node/commit/eb7c932a6d)] - **debugger**: revise async iterator usage to comply with lint rules (Rich Trott) [#38847](https://github.com/nodejs/node/pull/38847) * [[`f1000e0e52`](https://github.com/nodejs/node/commit/f1000e0e52)] - **debugger**: removed unused function argument (Rich Trott) [#38850](https://github.com/nodejs/node/pull/38850) * [[`ee1056da60`](https://github.com/nodejs/node/commit/ee1056da60)] - **debugger**: wait for V8 debugger to be enabled (Michaël Zasso) [#38811](https://github.com/nodejs/node/pull/38811) -* [[`47ad448def`](https://github.com/nodejs/node/commit/47ad448def)] - **deps**: upgrade npm to 7.15.1 (npm-robot) [#38880](https://github.com/nodejs/node/pull/38880) +* [[`47ad448def`](https://github.com/nodejs/node/commit/47ad448def)] - **deps**: upgrade npm to 7.15.1 (npm team) [#38880](https://github.com/nodejs/node/pull/38880) * [[`e8192b5e89`](https://github.com/nodejs/node/commit/e8192b5e89)] - **deps**: upgrade npm to 7.14.0 (Ruy Adorno) [#38750](https://github.com/nodejs/node/pull/38750) * [[`15aaf14690`](https://github.com/nodejs/node/commit/15aaf14690)] - **deps**: update llhttp to 6.0.2 (Fedor Indutny) [#38665](https://github.com/nodejs/node/pull/38665) * [[`108ffdb68f`](https://github.com/nodejs/node/commit/108ffdb68f)] - **doc**: fixed typo in n-api.md (julianjany) [#38822](https://github.com/nodejs/node/pull/38822) diff --git a/src/node_version.h b/src/node_version.h index 4eb0f2108776f1..0523885212d429 100644 --- a/src/node_version.h +++ b/src/node_version.h @@ -23,13 +23,13 @@ #define SRC_NODE_VERSION_H_ #define NODE_MAJOR_VERSION 16 -#define NODE_MINOR_VERSION 4 -#define NODE_PATCH_VERSION 3 +#define NODE_MINOR_VERSION 5 +#define NODE_PATCH_VERSION 0 #define NODE_VERSION_IS_LTS 0 #define NODE_VERSION_LTS_CODENAME "" -#define NODE_VERSION_IS_RELEASE 0 +#define NODE_VERSION_IS_RELEASE 1 #ifndef NODE_STRINGIFY #define NODE_STRINGIFY(n) NODE_STRINGIFY_HELPER(n)