diff --git a/.eslintrc.js b/.eslintrc.js index c2c8e1df7e58d3..f86f42e8f2e89f 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -210,6 +210,10 @@ module.exports = { selector: 'ThrowStatement > CallExpression[callee.name=/Error$/]', message: 'Use `new` keyword when throwing an `Error`.', }, + { + selector: "CallExpression[callee.property.name='substr']", + message: 'Use String.prototype.slice() or String.prototype.substring() instead of String.prototype.substr().', + }, { selector: "CallExpression[callee.name='isNaN']", message: 'Use Number.isNaN() instead of the global isNaN() function.', diff --git a/.github/workflows/license-builder.yml b/.github/workflows/license-builder.yml index b68e8b2f0e6a46..1e1d8e83fda103 100644 --- a/.github/workflows/license-builder.yml +++ b/.github/workflows/license-builder.yml @@ -21,7 +21,7 @@ jobs: with: persist-credentials: false - run: ./tools/license-builder.sh # Run the license builder tool - - uses: gr2m/create-or-update-pull-request-action@77596e3166f328b24613f7082ab30bf2d93079d5 + - uses: gr2m/create-or-update-pull-request-action@86ec1766034c8173518f61d2075cc2a173fb8c97 # v1.9.4 # Creates a PR or update the Action's existing PR, or # no-op if the base branch is already up-to-date. env: diff --git a/.github/workflows/test-linux.yml b/.github/workflows/test-linux.yml index f7f0c0aa1d7cdf..20dffc02b1d501 100644 --- a/.github/workflows/test-linux.yml +++ b/.github/workflows/test-linux.yml @@ -33,6 +33,10 @@ jobs: test-linux: if: github.event.pull_request.draft == false runs-on: ubuntu-latest + env: + CC: sccache gcc + CXX: sccache g++ + SCCACHE_GHA_ENABLED: 'true' steps: - uses: actions/checkout@0ad4b8fadaa221de15dcec353f45205ec38ea70b # v4.1.4 with: @@ -41,6 +45,10 @@ jobs: uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d # v5.1.0 with: python-version: ${{ env.PYTHON_VERSION }} + - name: Set up sccache + uses: mozilla-actions/sccache-action@2e7f9ec7921547d4b46598398ca573513895d0bd # v0.0.4 + with: + version: v0.8.0 - name: Environment Information run: npx envinfo - name: Build diff --git a/.github/workflows/tools.yml b/.github/workflows/tools.yml index 5e77678ab0c25c..fa39d006d6054d 100644 --- a/.github/workflows/tools.yml +++ b/.github/workflows/tools.yml @@ -126,7 +126,7 @@ jobs: npm ci NEW_VERSION=$(npm outdated --parseable | cut -d: -f4 | xargs) if [ "$NEW_VERSION" != "" ]; then - echo "NEW_VERSION=$NEW_VERSION" >> $GITHUB_ENV + echo "NEW_VERSION=$NEW_VERSION" >> "$GITHUB_ENV" rm -rf package-lock.json node_modules # Include $NEW_VERSION to explicitly update the package.json # entry for the dependency and also so that semver-major updates @@ -314,7 +314,7 @@ jobs: if: env.COMMIT_MSG == '' && (github.event_name == 'schedule' || inputs.id == 'all' || inputs.id == matrix.id) run: | echo "COMMIT_MSG=${{ matrix.subsystem }}: update ${{ matrix.id }} to ${{ env.NEW_VERSION }}" >> "$GITHUB_ENV" - - uses: gr2m/create-or-update-pull-request-action@77596e3166f328b24613f7082ab30bf2d93079d5 + - uses: gr2m/create-or-update-pull-request-action@86ec1766034c8173518f61d2075cc2a173fb8c97 # v1.9.4 if: github.event_name == 'schedule' || inputs.id == 'all' || inputs.id == matrix.id # Creates a PR or update the Action's existing PR, or # no-op if the base branch is already up-to-date.
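For context on the `substr` lint rule added above: `String.prototype.substr()` is a legacy Annex B API whose second argument is a length, whereas the recommended `slice()` and `substring()` take an end index. A minimal sketch of the behavioral difference (illustrative only, not part of the patch):

```js
const s = 'abcdef';

// Legacy substr(start, length): the second argument is a character count.
console.log(s.substr(1, 3));    // 'bcd' (3 characters starting at index 1)

// The replacements suggested by the lint rule take (start, end) indices.
console.log(s.slice(1, 4));     // 'bcd'
console.log(s.substring(1, 4)); // 'bcd'

// They differ on negative arguments: slice() counts from the end of the
// string, while substring() clamps negative values to 0.
console.log(s.slice(-2));       // 'ef'
console.log(s.substring(-2));   // 'abcdef'
```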
diff --git a/.github/workflows/update-v8.yml b/.github/workflows/update-v8.yml index 9d13e87ed505c6..68616c318db057 100644 --- a/.github/workflows/update-v8.yml +++ b/.github/workflows/update-v8.yml @@ -35,21 +35,23 @@ jobs: node-version: ${{ env.NODE_VERSION }} - name: Install @node-core/utils run: npm install -g @node-core/utils + - name: Setup Git config + run: | + git config --global user.name "Node.js GitHub Bot" + git config --global user.email "github-bot@iojs.org" - name: Check and download new V8 version run: | ./tools/dep_updaters/update-v8-patch.sh > temp-output cat temp-output tail -n1 temp-output | grep "NEW_VERSION=" >> "$GITHUB_ENV" || true rm temp-output - - uses: gr2m/create-or-update-pull-request-action@77596e3166f328b24613f7082ab30bf2d93079d5 + - uses: peter-evans/create-pull-request@6d6857d36972b65feb161a90e484f2984215f83e # v6.0.5 # Creates a PR or update the Action's existing PR, or # no-op if the base branch is already up-to-date. - env: - GITHUB_TOKEN: ${{ secrets.GH_USER_TOKEN }} with: - author: Node.js GitHub Bot - body: This is an automated patch update of V8 to ${{ env.NEW_VERSION }}. + token: ${{ secrets.GH_USER_TOKEN }} branch: actions/update-v8-patch # Custom branch *just* for this Action. - labels: v8 engine + delete-branch: true title: 'deps: patch V8 to ${{ env.NEW_VERSION }}' - update-pull-request-title-and-body: true + body: This is an automated patch update of V8 to ${{ env.NEW_VERSION }}. + labels: v8 engine diff --git a/CHANGELOG.md b/CHANGELOG.md index daca6ff0c6e392..e8153c8cafc376 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -35,7 +35,8 @@ release. -20.14.0
+20.15.0
+20.14.0
20.13.1
20.13.0
20.12.2
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f815adbb1fc34b..96cddfafb784c8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,5 +1,21 @@ # Contributing to Node.js +Contributions to Node.js include code, documentation, answering user questions, +running the project's infrastructure, and advocating for all types of Node.js +users. + +The Node.js project welcomes all contributions from anyone willing to work in +good faith with other contributors and the community. No contribution is too +small and all contributions are valued. + +The Node.js project has an open governance model. +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. See the +[GOVERNANCE.md](./GOVERNANCE.md) document for more information about how this +works. + +## Contents + * [Code of Conduct](#code-of-conduct) * [Issues](#issues) * [Pull Requests](#pull-requests) @@ -22,6 +38,9 @@ See [details on our policy on Code of Conduct](./doc/contributing/code-of-conduc ## [Pull Requests](./doc/contributing/pull-requests.md) +Pull Requests are the way concrete changes are made to the code, documentation, +dependencies, and tools contained in the `nodejs/node` repository. + * [Dependencies](./doc/contributing/pull-requests.md#dependencies) * [Setting up your local environment](./doc/contributing/pull-requests.md#setting-up-your-local-environment) * [The Process of Making Changes](./doc/contributing/pull-requests.md#the-process-of-making-changes) diff --git a/GOVERNANCE.md b/GOVERNANCE.md index 1a5faf99a480ec..f898396952a7d2 100644 --- a/GOVERNANCE.md +++ b/GOVERNANCE.md @@ -8,6 +8,9 @@ * [Technical steering committee](#technical-steering-committee) * [TSC meetings](#tsc-meetings) * [Collaborator nominations](#collaborator-nominations) + * [Who can nominate Collaborators?](#who-can-nominate-collaborators) + * [Ideal Nominees](#ideal-nominees) + * [Nominating a new Collaborator](#nominating-a-new-collaborator) * [Onboarding](#onboarding) * [Consensus seeking process](#consensus-seeking-process) @@ -124,11 +127,26 @@ the issue tracker is: ## Collaborator nominations -Existing collaborators can nominate someone to become a collaborator. Nominees -should have significant and valuable contributions across the Node.js +### Who can nominate Collaborators? + +Existing Collaborators can nominate someone to become a Collaborator. + +### Ideal Nominees + +Nominees should have significant and valuable contributions across the Node.js organization. -To nominate a new collaborator, open an issue in the [nodejs/node][] repository. +Contributions can be: + +* Opening pull requests. +* Comments and reviews. +* Opening new issues. +* Participation in other projects, teams, and working groups of the Node.js + organization. + +### Nominating a new Collaborator + +To nominate a new Collaborator, open an issue in the [nodejs/node][] repository. Provide a summary of the nominee's contributions. For example: * Commits in the [nodejs/node][] repository. 
diff --git a/Makefile b/Makefile index 475043ceae7eb6..f2009fff0c2f2b 100644 --- a/Makefile +++ b/Makefile @@ -8,6 +8,9 @@ PREFIX ?= /usr/local FLAKY_TESTS ?= run TEST_CI_ARGS ?= STAGINGSERVER ?= node-www +CLOUDFLARE_ENDPOINT ?= https://07be8d2fbc940503ca1be344714cb0d1.r2.cloudflarestorage.com +CLOUDFLARE_BUCKET ?= dist-staging +CLOUDFLARE_PROFILE ?= worker LOGLEVEL ?= silent OSTYPE := $(shell uname -s | tr '[:upper:]' '[:lower:]') ifeq ($(findstring os/390,$OSTYPE),os/390) @@ -1160,6 +1163,7 @@ pkg-upload: pkg ssh $(STAGINGSERVER) "mkdir -p nodejs/$(DISTTYPEDIR)/$(FULLVERSION)" chmod 664 $(TARNAME).pkg scp -p $(TARNAME).pkg $(STAGINGSERVER):nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME).pkg + ssh $(STAGINGSERVER) "aws s3 cp nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME).pkg s3://$(CLOUDFLARE_BUCKET)/nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME).pkg --endpoint=$(CLOUDFLARE_ENDPOINT) --profile=$(CLOUDFLARE_PROFILE)" ssh $(STAGINGSERVER) "touch nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME).pkg.done" $(TARBALL): release-only doc-only @@ -1209,10 +1213,12 @@ tar-upload: tar ssh $(STAGINGSERVER) "mkdir -p nodejs/$(DISTTYPEDIR)/$(FULLVERSION)" chmod 664 $(TARNAME).tar.gz scp -p $(TARNAME).tar.gz $(STAGINGSERVER):nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME).tar.gz + ssh $(STAGINGSERVER) "aws s3 cp nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME).tar.gz s3://$(CLOUDFLARE_BUCKET)/nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME).tar.gz --endpoint=$(CLOUDFLARE_ENDPOINT) --profile=$(CLOUDFLARE_PROFILE)" ssh $(STAGINGSERVER) "touch nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME).tar.gz.done" ifeq ($(XZ), 1) chmod 664 $(TARNAME).tar.xz scp -p $(TARNAME).tar.xz $(STAGINGSERVER):nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME).tar.xz + ssh $(STAGINGSERVER) "aws s3 cp nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME).tar.xz s3://$(CLOUDFLARE_BUCKET)/nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME).tar.xz --endpoint=$(CLOUDFLARE_ENDPOINT) --profile=$(CLOUDFLARE_PROFILE)" ssh $(STAGINGSERVER) "touch nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME).tar.xz.done" endif @@ -1222,6 +1228,7 @@ doc-upload: doc ssh $(STAGINGSERVER) "mkdir -p nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/docs/" chmod -R ug=rw-x+X,o=r+X out/doc/ scp -pr out/doc/* $(STAGINGSERVER):nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/docs/ + ssh $(STAGINGSERVER) "aws s3 cp --recursive nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/docs s3://$(CLOUDFLARE_BUCKET)/nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/docs/ --endpoint=$(CLOUDFLARE_ENDPOINT) --profile=$(CLOUDFLARE_PROFILE)" ssh $(STAGINGSERVER) "touch nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/docs.done" .PHONY: $(TARBALL)-headers @@ -1250,10 +1257,12 @@ tar-headers-upload: tar-headers ssh $(STAGINGSERVER) "mkdir -p nodejs/$(DISTTYPEDIR)/$(FULLVERSION)" chmod 664 $(TARNAME)-headers.tar.gz scp -p $(TARNAME)-headers.tar.gz $(STAGINGSERVER):nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME)-headers.tar.gz + ssh $(STAGINGSERVER) "aws s3 cp nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME)-headers.tar.gz s3://$(CLOUDFLARE_BUCKET)/nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME)-headers.tar.gz --endpoint=$(CLOUDFLARE_ENDPOINT) --profile=$(CLOUDFLARE_PROFILE)" ssh $(STAGINGSERVER) "touch nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME)-headers.tar.gz.done" ifeq ($(XZ), 1) chmod 664 $(TARNAME)-headers.tar.xz scp -p $(TARNAME)-headers.tar.xz $(STAGINGSERVER):nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME)-headers.tar.xz + ssh $(STAGINGSERVER) "aws s3 cp nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME)-headers.tar.xz 
s3://$(CLOUDFLARE_BUCKET)/nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME)-headers.tar.xz --endpoint=$(CLOUDFLARE_ENDPOINT) --profile=$(CLOUDFLARE_PROFILE)" ssh $(STAGINGSERVER) "touch nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME)-headers.tar.xz.done" endif @@ -1295,10 +1304,12 @@ binary-upload: binary ssh $(STAGINGSERVER) "mkdir -p nodejs/$(DISTTYPEDIR)/$(FULLVERSION)" chmod 664 $(TARNAME)-$(OSTYPE)-$(ARCH).tar.gz scp -p $(TARNAME)-$(OSTYPE)-$(ARCH).tar.gz $(STAGINGSERVER):nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME)-$(OSTYPE)-$(ARCH).tar.gz + ssh $(STAGINGSERVER) "aws s3 cp nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME)-$(OSTYPE)-$(ARCH).tar.gz s3://$(CLOUDFLARE_BUCKET)/nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME)-$(OSTYPE)-$(ARCH).tar.gz --endpoint=$(CLOUDFLARE_ENDPOINT) --profile=$(CLOUDFLARE_PROFILE)" ssh $(STAGINGSERVER) "touch nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME)-$(OSTYPE)-$(ARCH).tar.gz.done" ifeq ($(XZ), 1) chmod 664 $(TARNAME)-$(OSTYPE)-$(ARCH).tar.xz scp -p $(TARNAME)-$(OSTYPE)-$(ARCH).tar.xz $(STAGINGSERVER):nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME)-$(OSTYPE)-$(ARCH).tar.xz + ssh $(STAGINGSERVER) "aws s3 cp nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME)-$(OSTYPE)-$(ARCH).tar.xz s3://$(CLOUDFLARE_BUCKET)/nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME)-$(OSTYPE)-$(ARCH).tar.xz --endpoint=$(CLOUDFLARE_ENDPOINT) --profile=$(CLOUDFLARE_PROFILE)" ssh $(STAGINGSERVER) "touch nodejs/$(DISTTYPEDIR)/$(FULLVERSION)/$(TARNAME)-$(OSTYPE)-$(ARCH).tar.xz.done" endif diff --git a/README.md b/README.md index a7f7d19d048c31..8f3b9bfaeff0d3 100644 --- a/README.md +++ b/README.md @@ -164,8 +164,6 @@ For information about the governance of the Node.js project, see * [aduh95](https://github.com/aduh95) - **Antoine du Hamel** <> (he/him) -* [anonrig](https://github.com/anonrig) - - **Yagiz Nizipli** <> (he/him) * [apapirovski](https://github.com/apapirovski) - **Anatoli Papirovski** <> (he/him) * [benjamingr](https://github.com/benjamingr) - @@ -205,6 +203,8 @@ For information about the governance of the Node.js project, see #### TSC regular members +* [anonrig](https://github.com/anonrig) - + **Yagiz Nizipli** <> (he/him) * [BethGriggs](https://github.com/BethGriggs) - **Beth Griggs** <> (she/her) * [bnoordhuis](https://github.com/bnoordhuis) - @@ -411,6 +411,8 @@ For information about the governance of the Node.js project, see **Claudio Wunder** <> (he/they) * [panva](https://github.com/panva) - **Filip Skokan** <> (he/him) +* [pimterry](https://github.com/pimterry) - + **Tim Perry** <> (he/him) * [Qard](https://github.com/Qard) - **Stephen Belanger** <> (he/him) * [RafaelGSS](https://github.com/RafaelGSS) - @@ -771,7 +773,7 @@ Primary GPG keys for Node.js Releasers (some Releasers sign with subkeys): `8FCCA13FEF1D0C2E91008E09770F7A9A5AE15600` * **Myles Borins** <> `C4F0DFFF4E8C1A8236409D08E73BC641CC11F4C8` -* **RafaelGSS** <> +* **Rafael Gonzaga** <> `890C08DB8579162FEE0DF9DB8BEAB4DFCF555EF4` * **Richard Lau** <> `C82FA3AE1CBEDC6BE46B9360C43CEC45C17AB93C` @@ -784,17 +786,17 @@ To import the full set of trusted release keys (including subkeys possibly used to sign releases): ```bash -gpg --keyserver hkps://keys.openpgp.org --recv-keys 4ED778F539E3634C779C87C6D7062848A1AB005C -gpg --keyserver hkps://keys.openpgp.org --recv-keys 141F07595B7B3FFE74309A937405533BE57C7D57 -gpg --keyserver hkps://keys.openpgp.org --recv-keys 74F12602B6F1C4E913FAA37AD3A89613643B6201 -gpg --keyserver hkps://keys.openpgp.org --recv-keys DD792F5973C6DE52C432CBDAC77ABFA00DDBF2B7 -gpg --keyserver 
hkps://keys.openpgp.org --recv-keys CC68F5A3106FF448322E48ED27F5E38D5B0A215F -gpg --keyserver hkps://keys.openpgp.org --recv-keys 8FCCA13FEF1D0C2E91008E09770F7A9A5AE15600 -gpg --keyserver hkps://keys.openpgp.org --recv-keys C4F0DFFF4E8C1A8236409D08E73BC641CC11F4C8 -gpg --keyserver hkps://keys.openpgp.org --recv-keys 890C08DB8579162FEE0DF9DB8BEAB4DFCF555EF4 -gpg --keyserver hkps://keys.openpgp.org --recv-keys C82FA3AE1CBEDC6BE46B9360C43CEC45C17AB93C -gpg --keyserver hkps://keys.openpgp.org --recv-keys 108F52B48DB57BB0CC439B2997B01419BD92F80A -gpg --keyserver hkps://keys.openpgp.org --recv-keys A363A499291CBBC940DD62E41F10027AF002F8B0 +gpg --keyserver hkps://keys.openpgp.org --recv-keys 4ED778F539E3634C779C87C6D7062848A1AB005C # Beth Griggs +gpg --keyserver hkps://keys.openpgp.org --recv-keys 141F07595B7B3FFE74309A937405533BE57C7D57 # Bryan English +gpg --keyserver hkps://keys.openpgp.org --recv-keys 74F12602B6F1C4E913FAA37AD3A89613643B6201 # Danielle Adams +gpg --keyserver hkps://keys.openpgp.org --recv-keys DD792F5973C6DE52C432CBDAC77ABFA00DDBF2B7 # Juan José Arboleda +gpg --keyserver hkps://keys.openpgp.org --recv-keys CC68F5A3106FF448322E48ED27F5E38D5B0A215F # Marco Ippolito +gpg --keyserver hkps://keys.openpgp.org --recv-keys 8FCCA13FEF1D0C2E91008E09770F7A9A5AE15600 # Michaël Zasso +gpg --keyserver hkps://keys.openpgp.org --recv-keys C4F0DFFF4E8C1A8236409D08E73BC641CC11F4C8 # Myles Borins +gpg --keyserver hkps://keys.openpgp.org --recv-keys 890C08DB8579162FEE0DF9DB8BEAB4DFCF555EF4 # Rafael Gonzaga +gpg --keyserver hkps://keys.openpgp.org --recv-keys C82FA3AE1CBEDC6BE46B9360C43CEC45C17AB93C # Richard Lau +gpg --keyserver hkps://keys.openpgp.org --recv-keys 108F52B48DB57BB0CC439B2997B01419BD92F80A # Ruy Adorno +gpg --keyserver hkps://keys.openpgp.org --recv-keys A363A499291CBBC940DD62E41F10027AF002F8B0 # Ulises Gascón ``` See [Verifying binaries](#verifying-binaries) for how to use these keys to diff --git a/benchmark/crypto/timingSafeEqual.js b/benchmark/crypto/timingSafeEqual.js deleted file mode 100644 index 475807dba4ea4e..00000000000000 --- a/benchmark/crypto/timingSafeEqual.js +++ /dev/null @@ -1,22 +0,0 @@ -'use strict'; - -const common = require('../common.js'); -const assert = require('node:assert'); -const { randomBytes, timingSafeEqual } = require('node:crypto'); - -const bench = common.createBenchmark(main, { - n: [1e5], - bufferSize: [10, 100, 200, 2_100, 22_023], -}); - -function main({ n, bufferSize }) { - const bufs = [randomBytes(bufferSize), randomBytes(bufferSize)]; - bench.start(); - let count = 0; - for (let i = 0; i < n; i++) { - const ret = timingSafeEqual(bufs[i % 2], bufs[1]); - if (ret) count++; - } - bench.end(n); - assert.strictEqual(count, Math.floor(n / 2)); -} diff --git a/benchmark/http/bench-parser.js b/benchmark/http/bench-parser.js index 6b8bbd8808c720..f2c120cd97f133 100644 --- a/benchmark/http/bench-parser.js +++ b/benchmark/http/bench-parser.js @@ -46,7 +46,7 @@ function main({ len, n }) { let header = `GET /hello HTTP/1.1${CRLF}Content-Type: text/plain${CRLF}`; for (let i = 0; i < len; i++) { - header += `X-Filler${i}: ${Math.random().toString(36).substr(2)}${CRLF}`; + header += `X-Filler${i}: ${Math.random().toString(36).substring(2)}${CRLF}`; } header += CRLF; diff --git a/benchmark/misc/startup-cli-version.js b/benchmark/misc/startup-cli-version.js index 4dbeb81fc70740..0c8d4848b46cd4 100644 --- a/benchmark/misc/startup-cli-version.js +++ b/benchmark/misc/startup-cli-version.js @@ -9,13 +9,15 @@ const path = require('path'); // tends to be minimal 
and fewer operations are done to generate // these so that the startup cost is still dominated by a more // indispensable part of the CLI. +// NOTE: not all tools are present in the tarball, hence the need to filter +const availableCli = [ + 'tools/node_modules/eslint/bin/eslint.js', + 'deps/npm/bin/npx-cli.js', + 'deps/npm/bin/npm-cli.js', + 'deps/corepack/dist/corepack.js', +].filter((cli) => existsSync(path.resolve(__dirname, '../../', cli))); const bench = common.createBenchmark(main, { - cli: [ - 'tools/node_modules/eslint/bin/eslint.js', - 'deps/npm/bin/npx-cli.js', - 'deps/npm/bin/npm-cli.js', - 'deps/corepack/dist/corepack.js', - ], + cli: availableCli, count: [30], }); @@ -47,10 +49,6 @@ function spawnProcess(cli, bench, state) { function main({ count, cli }) { cli = path.resolve(__dirname, '../../', cli); - if (!existsSync(cli)) { - return; - } - const warmup = 3; const state = { count, finished: -warmup }; spawnProcess(cli, bench, state); diff --git a/configure.py b/configure.py index 11c0df455451d4..f189ba2bf09fc2 100755 --- a/configure.py +++ b/configure.py @@ -403,6 +403,74 @@ dest='shared_zlib_libpath', help='a directory to search for the shared zlib DLL') +shared_optgroup.add_argument('--shared-simdjson', + action='store_true', + dest='shared_simdjson', + default=None, + help='link to a shared simdjson DLL instead of static linking') + +shared_optgroup.add_argument('--shared-simdjson-includes', + action='store', + dest='shared_simdjson_includes', + help='directory containing simdjson header files') + +shared_optgroup.add_argument('--shared-simdjson-libname', + action='store', + dest='shared_simdjson_libname', + default='simdjson', + help='alternative lib name to link to [default: %(default)s]') + +shared_optgroup.add_argument('--shared-simdjson-libpath', + action='store', + dest='shared_simdjson_libpath', + help='a directory to search for the shared simdjson DLL') + + +shared_optgroup.add_argument('--shared-simdutf', + action='store_true', + dest='shared_simdutf', + default=None, + help='link to a shared simdutf DLL instead of static linking') + +shared_optgroup.add_argument('--shared-simdutf-includes', + action='store', + dest='shared_simdutf_includes', + help='directory containing simdutf header files') + +shared_optgroup.add_argument('--shared-simdutf-libname', + action='store', + dest='shared_simdutf_libname', + default='simdutf', + help='alternative lib name to link to [default: %(default)s]') + +shared_optgroup.add_argument('--shared-simdutf-libpath', + action='store', + dest='shared_simdutf_libpath', + help='a directory to search for the shared simdutf DLL') + + +shared_optgroup.add_argument('--shared-ada', + action='store_true', + dest='shared_ada', + default=None, + help='link to a shared ada DLL instead of static linking') + +shared_optgroup.add_argument('--shared-ada-includes', + action='store', + dest='shared_ada_includes', + help='directory containing ada header files') + +shared_optgroup.add_argument('--shared-ada-libname', + action='store', + dest='shared_ada_libname', + default='ada', + help='alternative lib name to link to [default: %(default)s]') + +shared_optgroup.add_argument('--shared-ada-libpath', + action='store', + dest='shared_ada_libpath', + help='a directory to search for the shared ada DLL') + shared_optgroup.add_argument('--shared-brotli', action='store_true', dest='shared_brotli', @@ -447,8 +515,6 @@ dest='shared_cares_libpath', help='a directory to search for the shared cares DLL') -parser.add_argument_group(shared_optgroup) - for builtin in
shareable_builtins: builtin_id = 'shared_builtin_' + builtin + '_path' shared_builtin_optgroup.add_argument('--shared-builtin-' + builtin + '-path', @@ -457,15 +523,11 @@ help='Path to shared file for ' + builtin + ' builtin. ' 'Will be used instead of bundled version at runtime') -parser.add_argument_group(shared_builtin_optgroup) - static_optgroup.add_argument('--static-zoslib-gyp', action='store', dest='static_zoslib_gyp', help='path to zoslib.gyp file for includes and to link to static zoslib library') -parser.add_argument_group(static_optgroup) - parser.add_argument('--tag', action='store', dest='tag', @@ -645,8 +707,6 @@ default='deps', help='Download directory [default: %(default)s]') -parser.add_argument_group(intl_optgroup) - parser.add_argument('--debug-lib', action='store_true', dest='node_debug_lib', @@ -659,8 +719,6 @@ default=None, help='build nghttp2 with DEBUGBUILD (default is false)') -parser.add_argument_group(http2_optgroup) - parser.add_argument('--without-npm', action='store_true', dest='without_npm', @@ -2050,6 +2108,9 @@ def make_bin_override(): configure_library('zlib', output) configure_library('http_parser', output) configure_library('libuv', output) +configure_library('ada', output) +configure_library('simdjson', output) +configure_library('simdutf', output) configure_library('brotli', output, pkgname=['libbrotlidec', 'libbrotlienc']) configure_library('cares', output, pkgname='libcares') configure_library('nghttp2', output, pkgname='libnghttp2') diff --git a/doc/api/assert.md b/doc/api/assert.md index 92121c42f20a46..7e58c66542039f 100644 --- a/doc/api/assert.md +++ b/doc/api/assert.md @@ -555,6 +555,9 @@ An alias of [`assert.ok()`][]. + +By default, Node.js enables trap-handler-based WebAssembly bounds +checks. As a result, V8 does not need to insert inline bounds checks +into the code compiled from WebAssembly, which may speed up WebAssembly +execution significantly, but this optimization requires allocating +a big virtual memory cage (currently 10GB). If the Node.js process +does not have access to a large enough virtual memory address space +due to system configurations or hardware limitations, users won't +be able to run any WebAssembly that involves allocation in this +virtual memory cage and will see an out-of-memory error. + +```console +$ ulimit -v 5000000 +$ node -p "new WebAssembly.Memory({ initial: 10, maximum: 100 });" +[eval]:1 +new WebAssembly.Memory({ initial: 10, maximum: 100 }); +^ + +RangeError: WebAssembly.Memory(): could not allocate memory + at [eval]:1:1 + at runScriptInThisContext (node:internal/vm:209:10) + at node:internal/process/execution:118:14 + at [eval]-wrapper:6:24 + at runScript (node:internal/process/execution:101:62) + at evalScript (node:internal/process/execution:136:3) + at node:internal/main/eval_string:49:3 + +``` + +`--disable-wasm-trap-handler` disables this optimization so that +users can at least run WebAssembly (with less optimal performance) +when the virtual memory address space available to their Node.js +process is lower than what the V8 WebAssembly memory cage needs. + ### `--disable-proto=mode` @@ -1226,6 +1266,8 @@ added: v7.6.0 Activate inspector on `host:port` and break at start of user script. Default `host:port` is `127.0.0.1:9229`. +See [V8 Inspector integration for Node.js][] for further explanation of the Node.js debugger. + ### `--inspect-port=[host:]port` + +Activate inspector on `host:port` and wait for a debugger to be attached. +Default `host:port` is `127.0.0.1:9229`.
+ +See [V8 Inspector integration for Node.js][] for further explanation of the Node.js debugger. + ### `-i`, `--interactive` -Closes all connections connected to this server. +Closes all connections connected to this server, including active connections +that are currently sending a request or waiting for a response. + +> This is a forceful way of closing all connections and should be used with +> caution. Whenever using this in conjunction with `server.close`, calling this +> _after_ `server.close` is recommended so as to avoid race conditions where new +> connections are created between a call to this and a call to `server.close`. + +```js +const http = require('node:http'); + +const server = http.createServer({ keepAliveTimeout: 60000 }, (req, res) => { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ + data: 'Hello World!', + })); +}); + +server.listen(8000); +// Close the server after 10 seconds +setTimeout(() => { + server.close(() => { + console.log('server on port 8000 closed successfully'); + }); + // Closes all connections, ensuring the server closes successfully + server.closeAllConnections(); +}, 10000); +``` ### `server.closeIdleConnections()` @@ -1696,6 +1742,37 @@ added: v18.2.0 Closes all connections connected to this server which are not sending a request or waiting for a response. +> Starting with Node.js 19.0.0, there's no need to call this method in +> conjunction with `server.close` to reap `keep-alive` connections. Using it +> won't cause any harm though, and it can be useful to ensure backwards +> compatibility for libraries and applications that need to support versions +> older than 19.0.0. Whenever using this in conjunction with `server.close`, +> calling this _after_ `server.close` is recommended so as to avoid race +> conditions where new connections are created between a call to this and a +> call to `server.close`. + +```js +const http = require('node:http'); + +const server = http.createServer({ keepAliveTimeout: 60000 }, (req, res) => { + res.writeHead(200, { 'Content-Type': 'application/json' }); + res.end(JSON.stringify({ + data: 'Hello World!', + })); +}); + +server.listen(8000); +// Close the server after 10 seconds +setTimeout(() => { + server.close(() => { + console.log('server on port 8000 closed successfully'); + }); + // Closes idle connections, such as keep-alive connections. The server will + // close once the remaining active connections are terminated + server.closeIdleConnections(); +}, 10000); +``` + ### `server.headersTimeout` * `streams` {Stream\[]|Iterable\[]|AsyncIterable\[]|Function\[]} @@ -76,9 +85,11 @@ added: v15.0.0 * `destination` {Stream|Function} * `source` {AsyncIterable} * Returns: {Promise|AsyncIterable} -* `options` {Object} +* `options` {Object} Pipeline options + * `signal` {AbortSignal} - * `end` {boolean} + * `end` {boolean} End the destination stream when the source stream ends. + Transform streams are always ended, even if this value is `false`. + **Default:** `true`. * Returns: {Promise} Fulfills when the pipeline is complete. ```cjs @@ -317,7 +328,9 @@ buffer. The amount of data potentially buffered depends on the `highWaterMark` option passed into the stream's constructor. For normal streams, the `highWaterMark` option specifies a [total number of bytes][hwm-gotcha]. For streams operating -in object mode, the `highWaterMark` specifies a total number of objects. +in object mode, the `highWaterMark` specifies a total number of objects.
For +streams operating on (but not decoding) strings, the `highWaterMark` specifies +a total number of UTF-16 code units. Data is buffered in `Readable` streams when the implementation calls [`stream.push(chunk)`][stream-push]. If the consumer of the Stream does not diff --git a/doc/api/test.md b/doc/api/test.md index d1a0b8bee358b2..87919c01df316e 100644 --- a/doc/api/test.md +++ b/doc/api/test.md @@ -1325,6 +1325,10 @@ changes: * `timeout` {number} A number of milliseconds the test will fail after. If unspecified, subtests inherit this value from their parent. **Default:** `Infinity`. + * `plan` {number} The number of assertions and subtests expected to be run in the test. + If the number of assertions and subtests run in the test does not match the + number specified in the plan, the test will fail. + **Default:** `undefined`. * `fn` {Function|AsyncFunction} The function under test. The first argument to this function is a [`TestContext`][] object. If the test uses callbacks, the callback function is passed as the second argument. **Default:** A no-op @@ -2912,6 +2916,54 @@ added: The name of the test. +### `context.plan(count)` + + + +> Stability: 1 - Experimental + +* `count` {number} The number of assertions and subtests that are expected to run. + +This function is used to set the number of assertions and subtests that are expected to run +within the test. If the number of assertions and subtests that run does not match the +expected count, the test will fail. + +> Note: To make sure assertions are tracked, `t.assert` must be used instead of `assert` directly. + +```js +test('top level test', (t) => { + t.plan(2); + t.assert.ok('some relevant assertion here'); + t.test('subtest', () => {}); +}); +``` + +When working with asynchronous code, the `plan` function can be used to ensure that the +correct number of assertions are run: + +```js +const { Readable } = require('node:stream'); + +test('planning with streams', (t, done) => { + function* generate() { + yield 'a'; + yield 'b'; + yield 'c'; + } + const expected = ['a', 'b', 'c']; + t.plan(expected.length); + const stream = Readable.from(generate()); + stream.on('data', (chunk) => { + t.assert.strictEqual(chunk, expected.shift()); + }); + + stream.on('end', () => { + done(); + }); +}); +``` + ### `context.runOnly(shouldRunOnlyTests)` + +* `data` {string|Buffer|TypedArray|DataView} When `data` is a string, + it will be encoded as UTF-8 before being used for computation. +* `value` {integer} An optional starting value. It must be a 32-bit unsigned + integer. **Default:** `0` +* Returns: {integer} A 32-bit unsigned integer containing the checksum. + +Computes a 32-bit [Cyclic Redundancy Check][] checksum of `data`. If +`value` is specified, it is used as the starting value of the checksum, +otherwise, 0 is used as the starting value. + +The CRC algorithm is designed to compute checksums and to detect errors +in data transmission. It's not suitable for cryptographic authentication. + +To be consistent with other APIs, if the `data` is a string, it will +be encoded with UTF-8 before being used for computation. If users only +use Node.js to compute and match the checksums, this works well with +other APIs that use the UTF-8 encoding by default. + +Some third-party JavaScript libraries compute the checksum on a +string based on `str.charCodeAt()` so that they can run in browsers. +If users want to match the checksum computed with this kind of library +in the browser, it's better to use the same library in Node.js +if it also runs in Node.js.
If users have to use `zlib.crc32()` to +match the checksum produced by such a third-party library: + +1. If the library accepts `Uint8Array` as input, use `TextEncoder` + in the browser to encode the string into a `Uint8Array` with UTF-8 + encoding, and compute the checksum based on the UTF-8 encoded string + in the browser. +2. If the library only takes a string and computes the checksum based on + `str.charCodeAt()`, on the Node.js side, convert the string into + a buffer using `Buffer.from(str, 'utf16le')`. + +```mjs +import zlib from 'node:zlib'; +import { Buffer } from 'node:buffer'; + +let crc = zlib.crc32('hello'); // 907060870 +crc = zlib.crc32('world', crc); // 4192936109 + +crc = zlib.crc32(Buffer.from('hello', 'utf16le')); // 1427272415 +crc = zlib.crc32(Buffer.from('world', 'utf16le'), crc); // 4150509955 +``` + +```cjs +const zlib = require('node:zlib'); +const { Buffer } = require('node:buffer'); + +let crc = zlib.crc32('hello'); // 907060870 +crc = zlib.crc32('world', crc); // 4192936109 + +crc = zlib.crc32(Buffer.from('hello', 'utf16le')); // 1427272415 +crc = zlib.crc32(Buffer.from('world', 'utf16le'), crc); // 4150509955 +``` + ### `zlib.close([callback])`