diff --git a/.eslintrc.js b/.eslintrc.js index e1961de42dea0a..5a63c79371c984 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -306,6 +306,7 @@ module.exports = { 'jsdoc/newline-after-description': 'off', 'jsdoc/require-returns-description': 'off', 'jsdoc/valid-types': 'off', + 'jsdoc/no-defaults': 'off', 'jsdoc/no-undefined-types': 'off', 'jsdoc/require-param': 'off', 'jsdoc/check-tag-names': 'off', diff --git a/.github/FAILED_DEP_UPDATE_ISSUE_TEMPLATE.md b/.github/FAILED_DEP_UPDATE_ISSUE_TEMPLATE.md deleted file mode 100644 index c1f35e66ed48fe..00000000000000 --- a/.github/FAILED_DEP_UPDATE_ISSUE_TEMPLATE.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -title: 'deps: update {{ env.FAILED_DEP }} job failed' -labels: dependencies ---- -This is an automatically generated issue by the {{ tools.context.action }} GitHub Action. -The update [workflow]({{ env.JOB_URL }}) has failed for {{ tools.context.workflow }}. -@nodejs/security-wg @nodejs/actions \ No newline at end of file diff --git a/.github/workflows/build-tarball.yml b/.github/workflows/build-tarball.yml index 6611f96c1f5dae..dd3e2b51038a8d 100644 --- a/.github/workflows/build-tarball.yml +++ b/.github/workflows/build-tarball.yml @@ -43,7 +43,7 @@ jobs: with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ env.PYTHON_VERSION }} - name: Environment Information @@ -69,7 +69,7 @@ jobs: with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ env.PYTHON_VERSION }} - name: Environment Information diff --git a/.github/workflows/build-windows.yml b/.github/workflows/build-windows.yml index 
80e08205f04a97..df615f35eda97d 100644 --- a/.github/workflows/build-windows.yml +++ b/.github/workflows/build-windows.yml @@ -42,7 +42,7 @@ jobs: with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ env.PYTHON_VERSION }} - name: Install deps diff --git a/.github/workflows/close-stale-pull-requests.yml b/.github/workflows/close-stale-pull-requests.yml index 05f1732f78df5d..b18cd5c37e69c4 100644 --- a/.github/workflows/close-stale-pull-requests.yml +++ b/.github/workflows/close-stale-pull-requests.yml @@ -1,4 +1,4 @@ -name: Close stale feature requests +name: Close stale pull requests on: workflow_dispatch: inputs: @@ -6,9 +6,6 @@ on: description: stop processing PRs after this date required: false type: string - schedule: - # Run every day at 1:00 AM UTC. - - cron: 0 1 * * * # yamllint disable rule:empty-lines env: @@ -51,7 +48,6 @@ jobs: end-date: ${{ env.END_DATE }} days-before-issue-stale: -1 days-before-issue-close: -1 - only-labels: test-stale-pr days-before-stale: 150 days-before-close: 30 stale-issue-label: stale diff --git a/.github/workflows/coverage-linux-without-intl.yml b/.github/workflows/coverage-linux-without-intl.yml index aca32133511030..bc112c6dbb798a 100644 --- a/.github/workflows/coverage-linux-without-intl.yml +++ b/.github/workflows/coverage-linux-without-intl.yml @@ -41,7 +41,7 @@ jobs: with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ env.PYTHON_VERSION }} - name: Environment Information @@ -64,6 +64,6 @@ jobs: - name: Clean tmp run: rm -rf coverage/tmp && rm -rf out - name: Upload - uses: 
codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # v3.1.3 + uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # v3.1.4 with: directory: ./coverage diff --git a/.github/workflows/coverage-linux.yml b/.github/workflows/coverage-linux.yml index 45520bf605b55a..5f33d2e3e296fc 100644 --- a/.github/workflows/coverage-linux.yml +++ b/.github/workflows/coverage-linux.yml @@ -41,7 +41,7 @@ jobs: with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ env.PYTHON_VERSION }} - name: Environment Information @@ -64,6 +64,6 @@ jobs: - name: Clean tmp run: rm -rf coverage/tmp && rm -rf out - name: Upload - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # v3.1.3 + uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # v3.1.4 with: directory: ./coverage diff --git a/.github/workflows/coverage-windows.yml b/.github/workflows/coverage-windows.yml index 6869b2f37fedae..b634227a1b4844 100644 --- a/.github/workflows/coverage-windows.yml +++ b/.github/workflows/coverage-windows.yml @@ -43,7 +43,7 @@ jobs: with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ env.PYTHON_VERSION }} - name: Install deps @@ -65,6 +65,6 @@ jobs: - name: Clean tmp run: npx rimraf ./coverage/tmp - name: Upload - uses: codecov/codecov-action@894ff025c7b54547a9a2a1e9f228beae737ad3c2 # v3.1.3 + uses: codecov/codecov-action@eaaf4bedf32dbdc6b720b63067d99c4d77d6047d # v3.1.4 with: directory: ./coverage diff --git a/.github/workflows/daily-wpt-fyi.yml b/.github/workflows/daily-wpt-fyi.yml index 58554e98facaab..27506cd5ed0a44 100644 
--- a/.github/workflows/daily-wpt-fyi.yml +++ b/.github/workflows/daily-wpt-fyi.yml @@ -33,7 +33,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ env.PYTHON_VERSION }} - name: Environment Information diff --git a/.github/workflows/linters.yml b/.github/workflows/linters.yml index a481e5be4e310a..c1f79a9955ff01 100644 --- a/.github/workflows/linters.yml +++ b/.github/workflows/linters.yml @@ -44,7 +44,7 @@ jobs: with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ env.PYTHON_VERSION }} - name: Environment Information @@ -64,7 +64,7 @@ jobs: with: node-version: ${{ env.NODE_VERSION }} - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ env.PYTHON_VERSION }} - name: Environment Information @@ -122,7 +122,7 @@ jobs: with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ env.PYTHON_VERSION }} - name: Environment Information @@ -139,7 +139,7 @@ jobs: with: persist-credentials: false - name: Use Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ env.PYTHON_VERSION }} - name: Environment Information 
diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 3f00950b2ab0e4..6549203d566f32 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -73,6 +73,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. - name: Upload to code-scanning - uses: github/codeql-action/upload-sarif@29b1f65c5e92e24fe6b6647da1eaabe529cec70f # v2.3.3 + uses: github/codeql-action/upload-sarif@83f0fe6c4988d98a455712a27f0255212bba9bd4 # v2.3.6 with: sarif_file: results.sarif diff --git a/.github/workflows/test-asan.yml b/.github/workflows/test-asan.yml index b574479cc063d3..b2cd66be992352 100644 --- a/.github/workflows/test-asan.yml +++ b/.github/workflows/test-asan.yml @@ -51,7 +51,7 @@ jobs: with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ env.PYTHON_VERSION }} - name: Environment Information diff --git a/.github/workflows/test-internet.yml b/.github/workflows/test-internet.yml index a803f6aacb5ed1..07e9ee23688af0 100644 --- a/.github/workflows/test-internet.yml +++ b/.github/workflows/test-internet.yml @@ -36,7 +36,7 @@ jobs: with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ env.PYTHON_VERSION }} - name: Environment Information diff --git a/.github/workflows/test-linux.yml b/.github/workflows/test-linux.yml index efa821a4ee09e7..ad9a8851a24c37 100644 --- a/.github/workflows/test-linux.yml +++ b/.github/workflows/test-linux.yml @@ -38,7 +38,7 @@ jobs: with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: 
actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ env.PYTHON_VERSION }} - name: Environment Information diff --git a/.github/workflows/test-macos.yml b/.github/workflows/test-macos.yml index b9a44445608ae9..313129dee34981 100644 --- a/.github/workflows/test-macos.yml +++ b/.github/workflows/test-macos.yml @@ -44,7 +44,7 @@ jobs: with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ env.PYTHON_VERSION }} - name: Environment Information diff --git a/.github/workflows/tools.yml b/.github/workflows/tools.yml index 5b5d79dbb58a25..4e66d7ba6eb821 100644 --- a/.github/workflows/tools.yml +++ b/.github/workflows/tools.yml @@ -282,7 +282,7 @@ jobs: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} if: matrix.id == 'icu' && (github.event_name == 'schedule' || inputs.id == 'all' || inputs.id == matrix.id) - uses: actions/setup-python@57ded4d7d5e986d7296eab16560982c6dd7c923b # v4.6.0 + uses: actions/setup-python@bd6b4b6205c4dbad673328db7b31b7fab9e241c0 # v4.6.1 with: python-version: ${{ env.PYTHON_VERSION }} - run: ${{ matrix.run }} @@ -307,14 +307,3 @@ jobs: labels: ${{ matrix.label }} title: '${{ matrix.subsystem }}: update ${{ matrix.id }} to ${{ env.NEW_VERSION }}' update-pull-request-title-and-body: true - - name: Open issue on fail - id: create-issue - if: github.event_name == 'schedule' && ${{ failure() }} - uses: JasonEtco/create-an-issue@e27dddc79c92bc6e4562f268fffa5ed752639abd # 2.9.1 - env: - GITHUB_TOKEN: ${{ secrets.GH_USER_TOKEN }} - FAILED_DEP: ${{ matrix.id }} - JOB_URL: ${{ github.event.repository.html_url }}/actions/runs/${{ github.run_id }} - with: - filename: .github/FAILED_DEP_UPDATE_ISSUE_TEMPLATE.md 
- update_existing: true diff --git a/.github/workflows/update-openssl.yml b/.github/workflows/update-openssl.yml index 681629fd00e019..ee42ebdb778c7f 100644 --- a/.github/workflows/update-openssl.yml +++ b/.github/workflows/update-openssl.yml @@ -39,7 +39,7 @@ jobs: GITHUB_TOKEN: ${{ secrets.GH_USER_TOKEN }} - name: Create PR with first commit if: env.HAS_UPDATE - uses: gr2m/create-or-update-pull-request-action@df20b2c073090271599a08c55ae26e0c3522b329 # v1.9.2 + uses: gr2m/create-or-update-pull-request-action@77596e3166f328b24613f7082ab30bf2d93079d5 # Creates a PR with the new OpenSSL source code committed env: GITHUB_TOKEN: ${{ secrets.GH_USER_TOKEN }} @@ -62,7 +62,7 @@ jobs: - name: Add second commit # Adds a second commit to the PR with the generated platform-dependent files if: env.HAS_UPDATE - uses: gr2m/create-or-update-pull-request-action@df20b2c073090271599a08c55ae26e0c3522b329 # v1.9.2 + uses: gr2m/create-or-update-pull-request-action@77596e3166f328b24613f7082ab30bf2d93079d5 env: GITHUB_TOKEN: ${{ secrets.GH_USER_TOKEN }} with: diff --git a/CHANGELOG.md b/CHANGELOG.md index 75b992b3f40e93..f567ed31de2a31 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -35,7 +35,8 @@ release.
strict-peer-deps
When such an override is performed, a warning is printed, explaining the
conflict and the packages involved. If --strict-peer-deps
is set, then
this warning is treated as a failure.
package-lock
If set to false, then ignore package-lock.json
files when installing. This
-will also prevent writing package-lock.json
if save
is true.
This configuration does not affect npm ci
.
foreground-scripts
package-lock
If set to false, then ignore package-lock.json
files when installing. This
will also prevent writing package-lock.json
if save
is true.
This configuration does not affect npm ci
.
omit
NODE_ENV
environment variable is set to
diff --git a/deps/npm/docs/output/commands/npm-find-dupes.html b/deps/npm/docs/output/commands/npm-find-dupes.html
index 82d5eba473f1df..725f197e8503d4 100644
--- a/deps/npm/docs/output/commands/npm-find-dupes.html
+++ b/deps/npm/docs/output/commands/npm-find-dupes.html
@@ -206,7 +206,6 @@ package-lock
If set to false, then ignore package-lock.json
files when installing. This
will also prevent writing package-lock.json
if save
is true.
This configuration does not affect npm ci
.
omit
NODE_ENV
environment variable is set to
diff --git a/deps/npm/docs/output/commands/npm-install-ci-test.html b/deps/npm/docs/output/commands/npm-install-ci-test.html
index 1afa069a3dc259..f87de26e312b18 100644
--- a/deps/npm/docs/output/commands/npm-install-ci-test.html
+++ b/deps/npm/docs/output/commands/npm-install-ci-test.html
@@ -142,7 +142,7 @@ strict-peer-deps
When such an override is performed, a warning is printed, explaining the
conflict and the packages involved. If --strict-peer-deps
is set, then
this warning is treated as a failure.
package-lock
If set to false, then ignore package-lock.json
files when installing. This
-will also prevent writing package-lock.json
if save
is true.
This configuration does not affect npm ci
.
foreground-scripts
strict-peer-deps
When such an override is performed, a warning is printed, explaining the
conflict and the packages involved. If --strict-peer-deps
is set, then
this warning is treated as a failure.
prefer-dedupe
Prefer to deduplicate packages if possible, rather than choosing a newer +version of a dependency.
package-lock
package-lock
If set to false, then ignore package-lock.json
files when installing. This
will also prevent writing package-lock.json
if save
is true.
This configuration does not affect npm ci
.
foreground-scripts
strict-peer-deps
When such an override is performed, a warning is printed, explaining the
conflict and the packages involved. If --strict-peer-deps
is set, then
this warning is treated as a failure.
prefer-dedupe
Prefer to deduplicate packages if possible, rather than choosing a newer +version of a dependency.
package-lock
package-lock
If set to false, then ignore package-lock.json
files when installing. This
will also prevent writing package-lock.json
if save
is true.
This configuration does not affect npm ci
.
foreground-scripts
package-lock
If set to false, then ignore package-lock.json
files when installing. This
will also prevent writing package-lock.json
if save
is true.
This configuration does not affect npm ci
.
omit
NODE_ENV
environment variable is set to
diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html
index 4aeaf0cb69c526..36f582e7548d91 100644
--- a/deps/npm/docs/output/commands/npm-ls.html
+++ b/deps/npm/docs/output/commands/npm-ls.html
@@ -160,7 +160,7 @@ npm ls promzard
in npm's source tree will show:
-npm@9.6.7 /path/to/npm
+npm@9.7.1 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
diff --git a/deps/npm/docs/output/commands/npm-publish.html b/deps/npm/docs/output/commands/npm-publish.html
index 6ebc39d6274a1d..24e715e4dbec9d 100644
--- a/deps/npm/docs/output/commands/npm-publish.html
+++ b/deps/npm/docs/output/commands/npm-publish.html
@@ -142,7 +142,7 @@ npm-publish
Table of contents
-
+
Synopsis
@@ -313,6 +313,14 @@ provenance
When publishing from a supported cloud CI/CD system, the package will be
publicly linked to where it was built and published from.
+This config can not be used with: provenance-file
+provenance-file
+
+- Default: null
+- Type: Path
+
+When publishing, the provenance bundle at the given path will be used.
+This config can not be used with: provenance
See Also
- package spec
diff --git a/deps/npm/docs/output/commands/npm-uninstall.html b/deps/npm/docs/output/commands/npm-uninstall.html
index 98a9f08418a14e..4a4e6c274a09e7 100644
--- a/deps/npm/docs/output/commands/npm-uninstall.html
+++ b/deps/npm/docs/output/commands/npm-uninstall.html
@@ -142,7 +142,7 @@ npm-uninstall
Table of contents
-
+
Synopsis
@@ -187,6 +187,20 @@ save
When used with the npm rm
command, removes the dependency from
package.json
.
Will also prevent writing to package-lock.json
if set to false
.
+global
+
+- Default: false
+- Type: Boolean
+
+Operates in "global" mode, so that packages are installed into the prefix
+folder instead of the current working directory. See
+folders for more on the differences in behavior.
+
+- packages are installed into the
{prefix}/lib/node_modules
folder, instead
+of the current working directory.
+- bin files are linked to
{prefix}/bin
+- man pages are linked to
{prefix}/share/man
+
workspace
- Default:
diff --git a/deps/npm/docs/output/commands/npm-update.html b/deps/npm/docs/output/commands/npm-update.html
index 32c3da827ca805..373a2ae22f9235 100644
--- a/deps/npm/docs/output/commands/npm-update.html
+++ b/deps/npm/docs/output/commands/npm-update.html
@@ -342,7 +342,6 @@ package-lock
If set to false, then ignore package-lock.json
files when installing. This
will also prevent writing package-lock.json
if save
is true.
-This configuration does not affect npm ci
.
foreground-scripts
- Default: false
diff --git a/deps/npm/docs/output/commands/npm.html b/deps/npm/docs/output/commands/npm.html
index 0cfc2ef24f9661..5e4ca681df2696 100644
--- a/deps/npm/docs/output/commands/npm.html
+++ b/deps/npm/docs/output/commands/npm.html
@@ -150,7 +150,7 @@ Table of contents
Note: This command is unaware of workspaces.
Version
-9.6.7
+9.7.1
Description
npm is the package manager for the Node JavaScript platform. It puts
modules in place so that node can find them, and manages dependency
diff --git a/deps/npm/docs/output/using-npm/config.html b/deps/npm/docs/output/using-npm/config.html
index ba718decd27ed1..440c4f29e97f10 100644
--- a/deps/npm/docs/output/using-npm/config.html
+++ b/deps/npm/docs/output/using-npm/config.html
@@ -142,7 +142,7 @@
config
Table of contents
-- Description
- Shorthands and Other CLI Niceties
- Config Settings
_auth
access
all
allow-same-version
audit
audit-level
auth-type
before
bin-links
browser
ca
cache
cafile
call
cidr
color
commit-hooks
depth
description
diff
diff-dst-prefix
diff-ignore-all-space
diff-name-only
diff-no-prefix
diff-src-prefix
diff-text
diff-unified
dry-run
editor
engine-strict
fetch-retries
fetch-retry-factor
fetch-retry-maxtimeout
fetch-retry-mintimeout
fetch-timeout
force
foreground-scripts
format-package-lock
fund
git
git-tag-version
global
globalconfig
heading
https-proxy
if-present
ignore-scripts
include
include-staged
include-workspace-root
init-author-email
init-author-name
init-author-url
init-license
init-module
init-version
install-links
install-strategy
json
legacy-peer-deps
link
local-address
location
lockfile-version
loglevel
logs-dir
logs-max
long
maxsockets
message
node-options
noproxy
offline
omit
omit-lockfile-registry-resolved
otp
pack-destination
package
package-lock
package-lock-only
parseable
prefer-offline
prefer-online
prefix
preid
progress
provenance
proxy
read-only
rebuild-bundle
registry
replace-registry-host
save
save-bundle
save-dev
save-exact
save-optional
save-peer
save-prefix
save-prod
scope
script-shell
searchexclude
searchlimit
searchopts
searchstaleness
shell
sign-git-commit
sign-git-tag
strict-peer-deps
strict-ssl
tag
tag-version-prefix
timing
umask
unicode
update-notifier
usage
user-agent
userconfig
version
versions
viewer
which
workspace
workspaces
workspaces-update
yes
also
cache-max
cache-min
cert
ci-name
dev
global-style
init.author.email
init.author.name
init.author.url
init.license
init.module
init.version
key
legacy-bundling
only
optional
production
shrinkwrap
tmp
- See also
+- Description
- Shorthands and Other CLI Niceties
- Config Settings
_auth
access
all
allow-same-version
audit
audit-level
auth-type
before
bin-links
browser
ca
cache
cafile
call
cidr
color
commit-hooks
depth
description
diff
diff-dst-prefix
diff-ignore-all-space
diff-name-only
diff-no-prefix
diff-src-prefix
diff-text
diff-unified
dry-run
editor
engine-strict
fetch-retries
fetch-retry-factor
fetch-retry-maxtimeout
fetch-retry-mintimeout
fetch-timeout
force
foreground-scripts
format-package-lock
fund
git
git-tag-version
global
globalconfig
heading
https-proxy
if-present
ignore-scripts
include
include-staged
include-workspace-root
init-author-email
init-author-name
init-author-url
init-license
init-module
init-version
install-links
install-strategy
json
legacy-peer-deps
link
local-address
location
lockfile-version
loglevel
logs-dir
logs-max
long
maxsockets
message
node-options
noproxy
offline
omit
omit-lockfile-registry-resolved
otp
pack-destination
package
package-lock
package-lock-only
parseable
prefer-dedupe
prefer-offline
prefer-online
prefix
preid
progress
provenance
provenance-file
proxy
read-only
rebuild-bundle
registry
replace-registry-host
save
save-bundle
save-dev
save-exact
save-optional
save-peer
save-prefix
save-prod
scope
script-shell
searchexclude
searchlimit
searchopts
searchstaleness
shell
sign-git-commit
sign-git-tag
strict-peer-deps
strict-ssl
tag
tag-version-prefix
timing
umask
unicode
update-notifier
usage
user-agent
userconfig
version
versions
viewer
which
workspace
workspaces
workspaces-update
yes
also
cache-max
cache-min
cert
ci-name
dev
global-style
init.author.email
init.author.name
init.author.url
init.license
init.module
init.version
key
legacy-bundling
only
optional
production
shrinkwrap
tmp
- See also
Description
@@ -922,7 +922,6 @@ package-lock
If set to false, then ignore package-lock.json
files when installing. This
will also prevent writing package-lock.json
if save
is true.
-This configuration does not affect npm ci
.
package-lock-only
- Default: false
@@ -941,6 +940,13 @@ parseable
Output parseable results from commands that write to standard output. For
npm search
, this will be tab-separated table format.
+prefer-dedupe
+
+- Default: false
+- Type: Boolean
+
+Prefer to deduplicate packages if possible, rather than choosing a newer
+version of a dependency.
prefer-offline
- Default: false
@@ -987,6 +993,14 @@ provenance
When publishing from a supported cloud CI/CD system, the package will be
publicly linked to where it was built and published from.
+This config can not be used with: provenance-file
+provenance-file
+
+- Default: null
+- Type: Path
+
+When publishing, the provenance bundle at the given path will be used.
+This config can not be used with: provenance
proxy
- Default: null
diff --git a/deps/npm/lib/base-command.js b/deps/npm/lib/base-command.js
index 0adff8e5d95ea8..598964ce524e39 100644
--- a/deps/npm/lib/base-command.js
+++ b/deps/npm/lib/base-command.js
@@ -2,17 +2,81 @@
const { relative } = require('path')
-const ConfigDefinitions = require('./utils/config/definitions.js')
+const definitions = require('./utils/config/definitions.js')
const getWorkspaces = require('./workspaces/get-workspaces.js')
-
-const cmdAliases = require('./utils/cmd-list').aliases
+const { aliases: cmdAliases } = require('./utils/cmd-list')
class BaseCommand {
static workspaces = false
static ignoreImplicitWorkspace = true
+ // these are all overridden by individual commands
+ static name = null
+ static description = null
+ static params = null
+
+ // this is a static so that we can read from it without instantiating a command
+ // which would require loading the config
+ static get describeUsage () {
+ const seenExclusive = new Set()
+ const wrapWidth = 80
+ const { description, usage = [''], name, params } = this
+
+ const fullUsage = [
+ `${description}`,
+ '',
+ 'Usage:',
+ ...usage.map(u => `npm ${name} ${u}`.trim()),
+ ]
+
+ if (params) {
+ let results = ''
+ let line = ''
+ for (const param of params) {
+ /* istanbul ignore next */
+ if (seenExclusive.has(param)) {
+ continue
+ }
+ const { exclusive } = definitions[param]
+ let paramUsage = `${definitions[param].usage}`
+ if (exclusive) {
+ const exclusiveParams = [paramUsage]
+ seenExclusive.add(param)
+ for (const e of exclusive) {
+ seenExclusive.add(e)
+ exclusiveParams.push(definitions[e].usage)
+ }
+ paramUsage = `${exclusiveParams.join('|')}`
+ }
+ paramUsage = `[${paramUsage}]`
+ if (line.length + paramUsage.length > wrapWidth) {
+ results = [results, line].filter(Boolean).join('\n')
+ line = ''
+ }
+ line = [line, paramUsage].filter(Boolean).join(' ')
+ }
+ fullUsage.push('')
+ fullUsage.push('Options:')
+ fullUsage.push([results, line].filter(Boolean).join('\n'))
+ }
+
+ const aliases = Object.entries(cmdAliases).reduce((p, [k, v]) => {
+ return p.concat(v === name ? k : [])
+ }, [])
+
+ if (aliases.length) {
+ const plural = aliases.length === 1 ? '' : 'es'
+ fullUsage.push('')
+ fullUsage.push(`alias${plural}: ${aliases.join(', ')}`)
+ }
+
+ fullUsage.push('')
+ fullUsage.push(`Run "npm help ${name}" for more info`)
+
+ return fullUsage.join('\n')
+ }
+
constructor (npm) {
- this.wrapWidth = 80
this.npm = npm
const { config } = this.npm
@@ -39,59 +103,7 @@ class BaseCommand {
}
get usage () {
- const usage = [
- `${this.description}`,
- '',
- 'Usage:',
- ]
-
- if (!this.constructor.usage) {
- usage.push(`npm ${this.name}`)
- } else {
- usage.push(...this.constructor.usage.map(u => `npm ${this.name} ${u}`))
- }
-
- if (this.params) {
- usage.push('')
- usage.push('Options:')
- usage.push(this.wrappedParams)
- }
-
- const aliases = Object.keys(cmdAliases).reduce((p, c) => {
- if (cmdAliases[c] === this.name) {
- p.push(c)
- }
- return p
- }, [])
-
- if (aliases.length === 1) {
- usage.push('')
- usage.push(`alias: ${aliases.join(', ')}`)
- } else if (aliases.length > 1) {
- usage.push('')
- usage.push(`aliases: ${aliases.join(', ')}`)
- }
-
- usage.push('')
- usage.push(`Run "npm help ${this.name}" for more info`)
-
- return usage.join('\n')
- }
-
- get wrappedParams () {
- let results = ''
- let line = ''
-
- for (const param of this.params) {
- const usage = `[${ConfigDefinitions[param].usage}]`
- if (line.length && line.length + usage.length > this.wrapWidth) {
- results = [results, line].filter(Boolean).join('\n')
- line = ''
- }
- line = [line, usage].filter(Boolean).join(' ')
- }
- results = [results, line].filter(Boolean).join('\n')
- return results
+ return this.constructor.describeUsage
}
usageError (prefix = '') {
diff --git a/deps/npm/lib/cli-entry.js b/deps/npm/lib/cli-entry.js
new file mode 100644
index 00000000000000..aad06e06903856
--- /dev/null
+++ b/deps/npm/lib/cli-entry.js
@@ -0,0 +1,74 @@
+/* eslint-disable max-len */
+
+// Separated out for easier unit testing
+module.exports = async (process, validateEngines) => {
+ // set it here so that regardless of what happens later, we don't
+ // leak any private CLI configs to other programs
+ process.title = 'npm'
+
+ // if npm is called as "npmg" or "npm_g", then run in global mode.
+ if (process.argv[1][process.argv[1].length - 1] === 'g') {
+ process.argv.splice(1, 1, 'npm', '-g')
+ }
+
+ const satisfies = require('semver/functions/satisfies')
+ const exitHandler = require('./utils/exit-handler.js')
+ const Npm = require('./npm.js')
+ const npm = new Npm()
+ exitHandler.setNpm(npm)
+
+ // only log node and npm paths in argv initially since argv can contain sensitive info. a cleaned version will be logged later
+ const log = require('./utils/log-shim.js')
+ log.verbose('cli', process.argv.slice(0, 2).join(' '))
+ log.info('using', 'npm@%s', npm.version)
+ log.info('using', 'node@%s', process.version)
+
+ // At this point we've required a few files and can be pretty sure we dont contain invalid syntax for this version of node. It's possible a lazy require would, but that's unlikely enough that it's not worth catching anymore and we attach the more important exit handlers.
+ validateEngines.off()
+ process.on('uncaughtException', exitHandler)
+ process.on('unhandledRejection', exitHandler)
+
+ // It is now safe to log a warning if they are using a version of node that is not going to fail on syntax errors but is still unsupported and untested and might not work reliably. This is safe to use the logger now which we want since this will show up in the error log too.
+ if (!satisfies(validateEngines.node, validateEngines.engines)) {
+ log.warn('cli', validateEngines.unsupportedMessage)
+ }
+
+ let cmd
+ // Now actually fire up npm and run the command.
+ // This is how to use npm programmatically:
+ try {
+ await npm.load()
+
+ // npm -v
+ if (npm.config.get('version', 'cli')) {
+ npm.output(npm.version)
+ return exitHandler()
+ }
+
+ // npm --versions
+ if (npm.config.get('versions', 'cli')) {
+ npm.argv = ['version']
+ npm.config.set('usage', false, 'cli')
+ }
+
+ cmd = npm.argv.shift()
+ if (!cmd) {
+ npm.output(npm.usage)
+ process.exitCode = 1
+ return exitHandler()
+ }
+
+ await npm.exec(cmd)
+ return exitHandler()
+ } catch (err) {
+ if (err.code === 'EUNKNOWNCOMMAND') {
+ const didYouMean = require('./utils/did-you-mean.js')
+ const suggestions = await didYouMean(npm.localPrefix, cmd)
+ npm.output(`Unknown command: "${cmd}"${suggestions}\n`)
+ npm.output('To see a list of supported npm commands, run:\n npm help')
+ process.exitCode = 1
+ return exitHandler()
+ }
+ return exitHandler(err)
+ }
+}
diff --git a/deps/npm/lib/cli.js b/deps/npm/lib/cli.js
index a393626f08291f..c85ecb65a7005a 100644
--- a/deps/npm/lib/cli.js
+++ b/deps/npm/lib/cli.js
@@ -1,102 +1,4 @@
-/* eslint-disable max-len */
-// Code in this file should work in all conceivably runnable versions of node.
-// A best effort is made to catch syntax errors to give users a good error message if they are using a node version that doesn't allow syntax we are using in other files such as private properties, etc
+const validateEngines = require('./es6/validate-engines.js')
+const cliEntry = require('path').resolve(__dirname, 'cli-entry.js')
-// Separated out for easier unit testing
-module.exports = async process => {
- // set it here so that regardless of what happens later, we don't
- // leak any private CLI configs to other programs
- process.title = 'npm'
-
- // if npm is called as "npmg" or "npm_g", then run in global mode.
- if (process.argv[1][process.argv[1].length - 1] === 'g') {
- process.argv.splice(1, 1, 'npm', '-g')
- }
-
- const nodeVersion = process.version.replace(/-.*$/, '')
- const pkg = require('../package.json')
- const engines = pkg.engines.node
- const npmVersion = `v${pkg.version}`
-
- const unsupportedMessage = `npm ${npmVersion} does not support Node.js ${nodeVersion}. This version of npm supports the following node versions: \`${engines}\`. You can find the latest version at https://nodejs.org/.`
-
- const brokenMessage = `ERROR: npm ${npmVersion} is known not to run on Node.js ${nodeVersion}. This version of npm supports the following node versions: \`${engines}\`. You can find the latest version at https://nodejs.org/.`
-
- // Coverage ignored because this is only hit in very unsupported node versions and it's a best effort attempt to show something nice in those cases
- /* istanbul ignore next */
- const syntaxErrorHandler = (err) => {
- if (err instanceof SyntaxError) {
- // eslint-disable-next-line no-console
- console.error(`${brokenMessage}\n\nERROR:`)
- // eslint-disable-next-line no-console
- console.error(err)
- return process.exit(1)
- }
- throw err
- }
-
- process.on('uncaughtException', syntaxErrorHandler)
- process.on('unhandledRejection', syntaxErrorHandler)
-
- const satisfies = require('semver/functions/satisfies')
- const exitHandler = require('./utils/exit-handler.js')
- const Npm = require('./npm.js')
- const npm = new Npm()
- exitHandler.setNpm(npm)
-
- // only log node and npm paths in argv initially since argv can contain sensitive info. a cleaned version will be logged later
- const log = require('./utils/log-shim.js')
- log.verbose('cli', process.argv.slice(0, 2).join(' '))
- log.info('using', 'npm@%s', npm.version)
- log.info('using', 'node@%s', process.version)
-
- // At this point we've required a few files and can be pretty sure we dont contain invalid syntax for this version of node. It's possible a lazy require would, but that's unlikely enough that it's not worth catching anymore and we attach the more important exit handlers.
- process.off('uncaughtException', syntaxErrorHandler)
- process.off('unhandledRejection', syntaxErrorHandler)
- process.on('uncaughtException', exitHandler)
- process.on('unhandledRejection', exitHandler)
-
- // It is now safe to log a warning if they are using a version of node that is not going to fail on syntax errors but is still unsupported and untested and might not work reliably. This is safe to use the logger now which we want since this will show up in the error log too.
- if (!satisfies(nodeVersion, engines)) {
- log.warn('cli', unsupportedMessage)
- }
-
- let cmd
- // Now actually fire up npm and run the command.
- // This is how to use npm programmatically:
- try {
- await npm.load()
-
- // npm -v
- if (npm.config.get('version', 'cli')) {
- npm.output(npm.version)
- return exitHandler()
- }
-
- // npm --versions
- if (npm.config.get('versions', 'cli')) {
- npm.argv = ['version']
- npm.config.set('usage', false, 'cli')
- }
-
- cmd = npm.argv.shift()
- if (!cmd) {
- npm.output(await npm.usage)
- process.exitCode = 1
- return exitHandler()
- }
-
- await npm.exec(cmd)
- return exitHandler()
- } catch (err) {
- if (err.code === 'EUNKNOWNCOMMAND') {
- const didYouMean = require('./utils/did-you-mean.js')
- const suggestions = await didYouMean(npm, npm.localPrefix, cmd)
- npm.output(`Unknown command: "${cmd}"${suggestions}\n`)
- npm.output('To see a list of supported npm commands, run:\n npm help')
- process.exitCode = 1
- return exitHandler()
- }
- return exitHandler(err)
- }
-}
+module.exports = (process) => validateEngines(process, () => require(cliEntry))
diff --git a/deps/npm/lib/commands/access.js b/deps/npm/lib/commands/access.js
index 318151fc81e2c0..99c1264a84eda3 100644
--- a/deps/npm/lib/commands/access.js
+++ b/deps/npm/lib/commands/access.js
@@ -1,8 +1,6 @@
-const path = require('path')
-
const libnpmaccess = require('libnpmaccess')
const npa = require('npm-package-arg')
-const readPackageJson = require('read-package-json-fast')
+const pkgJson = require('@npmcli/package-json')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const otplease = require('../utils/otplease.js')
@@ -47,7 +45,7 @@ class Access extends BaseCommand {
'revoke []',
]
- async completion (opts) {
+ static async completion (opts) {
const argv = opts.conf.argv.remain
if (argv.length === 2) {
return commands
@@ -178,8 +176,8 @@ class Access extends BaseCommand {
async #getPackage (name, requireScope) {
if (!name) {
try {
- const pkg = await readPackageJson(path.resolve(this.npm.prefix, 'package.json'))
- name = pkg.name
+ const { content } = await pkgJson.normalize(this.npm.prefix)
+ name = content.name
} catch (err) {
if (err.code === 'ENOENT') {
throw Object.assign(new Error('no package name given and no package.json found'), {
diff --git a/deps/npm/lib/commands/audit.js b/deps/npm/lib/commands/audit.js
index 7b75ecbf2e0243..500620f2cd01bd 100644
--- a/deps/npm/lib/commands/audit.js
+++ b/deps/npm/lib/commands/audit.js
@@ -1,9 +1,10 @@
-const auditReport = require('npm-audit-report')
+const npmAuditReport = require('npm-audit-report')
const fetch = require('npm-registry-fetch')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const npa = require('npm-package-arg')
const pacote = require('pacote')
const pMap = require('p-map')
+const { sigstore } = require('sigstore')
const ArboristWorkspaceCmd = require('../arborist-cmd.js')
const auditError = require('../utils/audit-error.js')
@@ -37,7 +38,12 @@ class VerifySignatures {
throw new Error('found no installed dependencies to audit')
}
- await Promise.all([...registries].map(registry => this.setKeys({ registry })))
+ const tuf = await sigstore.tuf.client({
+ tufCachePath: this.opts.tufCache,
+ retry: this.opts.retry,
+ timeout: this.opts.timeout,
+ })
+ await Promise.all([...registries].map(registry => this.setKeys({ registry, tuf })))
const progress = log.newItem('verifying registry signatures', edges.size)
const mapper = async (edge) => {
@@ -187,20 +193,42 @@ class VerifySignatures {
return { edges, registries }
}
- async setKeys ({ registry }) {
- const keys = await fetch.json('/-/npm/v1/keys', {
- ...this.npm.flatOptions,
- registry,
- }).then(({ keys: ks }) => ks.map((key) => ({
- ...key,
- pemkey: `-----BEGIN PUBLIC KEY-----\n${key.key}\n-----END PUBLIC KEY-----`,
- }))).catch(err => {
- if (err.code === 'E404' || err.code === 'E400') {
- return null
- } else {
- throw err
- }
- })
+ async setKeys ({ registry, tuf }) {
+ const { host, pathname } = new URL(registry)
+ // Strip any trailing slashes from pathname
+ const regKey = `${host}${pathname.replace(/\/$/, '')}/keys.json`
+ let keys = await tuf.getTarget(regKey)
+ .then((target) => JSON.parse(target))
+ .then(({ keys: ks }) => ks.map((key) => ({
+ ...key,
+ keyid: key.keyId,
+ pemkey: `-----BEGIN PUBLIC KEY-----\n${key.publicKey.rawBytes}\n-----END PUBLIC KEY-----`,
+ expires: key.publicKey.validFor.end || null,
+ }))).catch(err => {
+ if (err.code === 'TUF_FIND_TARGET_ERROR') {
+ return null
+ } else {
+ throw err
+ }
+ })
+
+ // If keys not found in Sigstore TUF repo, fallback to registry keys API
+ if (!keys) {
+ keys = await fetch.json('/-/npm/v1/keys', {
+ ...this.npm.flatOptions,
+ registry,
+ }).then(({ keys: ks }) => ks.map((key) => ({
+ ...key,
+ pemkey: `-----BEGIN PUBLIC KEY-----\n${key.key}\n-----END PUBLIC KEY-----`,
+ }))).catch(err => {
+ if (err.code === 'E404' || err.code === 'E400') {
+ return null
+ } else {
+ throw err
+ }
+ })
+ }
+
if (keys) {
this.keys.set(registry, keys)
}
@@ -384,7 +412,7 @@ class Audit extends ArboristWorkspaceCmd {
static usage = ['[fix|signatures]']
- async completion (opts) {
+ static async completion (opts) {
const argv = opts.conf.argv.remain
if (argv.length === 2) {
@@ -429,7 +457,10 @@ class Audit extends ArboristWorkspaceCmd {
} else {
// will throw if there's an error, because this is an audit command
auditError(this.npm, arb.auditReport)
- const result = auditReport(arb.auditReport, opts)
+ const result = npmAuditReport(arb.auditReport, {
+ ...opts,
+ chalk: this.npm.chalk,
+ })
process.exitCode = process.exitCode || result.exitCode
this.npm.output(result.report)
}
diff --git a/deps/npm/lib/commands/cache.js b/deps/npm/lib/commands/cache.js
index 66b432dfc13a66..50bb35e3544dfe 100644
--- a/deps/npm/lib/commands/cache.js
+++ b/deps/npm/lib/commands/cache.js
@@ -73,7 +73,7 @@ class Cache extends BaseCommand {
'verify',
]
- async completion (opts) {
+ static async completion (opts) {
const argv = opts.conf.argv.remain
if (argv.length === 2) {
return ['add', 'clean', 'verify', 'ls']
diff --git a/deps/npm/lib/commands/ci.js b/deps/npm/lib/commands/ci.js
index 97c292e8c53896..e662379278e031 100644
--- a/deps/npm/lib/commands/ci.js
+++ b/deps/npm/lib/commands/ci.js
@@ -17,7 +17,6 @@ class CI extends ArboristWorkspaceCmd {
'global-style',
'omit',
'strict-peer-deps',
- 'package-lock',
'foreground-scripts',
'ignore-scripts',
'audit',
diff --git a/deps/npm/lib/commands/completion.js b/deps/npm/lib/commands/completion.js
index efbad9d61001b5..38205ad8fc49ee 100644
--- a/deps/npm/lib/commands/completion.js
+++ b/deps/npm/lib/commands/completion.js
@@ -33,8 +33,9 @@ const fs = require('fs/promises')
const nopt = require('nopt')
const { resolve } = require('path')
+const Npm = require('../npm.js')
const { definitions, shorthands } = require('../utils/config/index.js')
-const { commands, aliases } = require('../utils/cmd-list.js')
+const { commands, aliases, deref } = require('../utils/cmd-list.js')
const configNames = Object.keys(definitions)
const shorthandNames = Object.keys(shorthands)
const allConfs = configNames.concat(shorthandNames)
@@ -48,7 +49,7 @@ class Completion extends BaseCommand {
static name = 'completion'
// completion for the completion command
- async completion (opts) {
+ static async completion (opts) {
if (opts.w > 2) {
return
}
@@ -156,10 +157,14 @@ class Completion extends BaseCommand {
// at this point, if words[1] is some kind of npm command,
// then complete on it.
// otherwise, do nothing
- const impl = await this.npm.cmd(cmd)
- if (impl.completion) {
- const comps = await impl.completion(opts)
- return this.wrap(opts, comps)
+ try {
+ const { completion } = Npm.cmd(cmd)
+ if (completion) {
+ const comps = await completion(opts, this.npm)
+ return this.wrap(opts, comps)
+ }
+ } catch {
+ // it wasnt a valid command, so do nothing
}
}
@@ -267,7 +272,7 @@ const cmdCompl = (opts, npm) => {
return matches
}
- const derefs = new Set([...matches.map(c => npm.deref(c))])
+ const derefs = new Set([...matches.map(c => deref(c))])
if (derefs.size === 1) {
return [...derefs]
}
diff --git a/deps/npm/lib/commands/config.js b/deps/npm/lib/commands/config.js
index dfd44015cd0432..b49cdd648f9d02 100644
--- a/deps/npm/lib/commands/config.js
+++ b/deps/npm/lib/commands/config.js
@@ -7,7 +7,7 @@ const { spawn } = require('child_process')
const { EOL } = require('os')
const ini = require('ini')
const localeCompare = require('@isaacs/string-locale-compare')('en')
-const rpj = require('read-package-json-fast')
+const pkgJson = require('@npmcli/package-json')
const log = require('../utils/log-shim.js')
// These are the configs that we can nerf-dart. Not all of them currently even
@@ -74,7 +74,7 @@ class Config extends BaseCommand {
static skipConfigValidation = true
- async completion (opts) {
+ static async completion (opts) {
const argv = opts.conf.argv.remain
if (argv[1] !== 'config') {
argv.unshift('config')
@@ -346,15 +346,15 @@ ${defData}
}
if (!this.npm.global) {
- const pkgPath = resolve(this.npm.prefix, 'package.json')
- const pkg = await rpj(pkgPath).catch(() => ({}))
+ const { content } = await pkgJson.normalize(this.npm.prefix).catch(() => ({ content: {} }))
- if (pkg.publishConfig) {
+ if (content.publishConfig) {
+ const pkgPath = resolve(this.npm.prefix, 'package.json')
msg.push(`; "publishConfig" from ${pkgPath}`)
msg.push('; This set of config values will be used at publish-time.', '')
- const pkgKeys = Object.keys(pkg.publishConfig).sort(localeCompare)
+ const pkgKeys = Object.keys(content.publishConfig).sort(localeCompare)
for (const k of pkgKeys) {
- const v = publicVar(k) ? JSON.stringify(pkg.publishConfig[k]) : '(protected)'
+ const v = publicVar(k) ? JSON.stringify(content.publishConfig[k]) : '(protected)'
msg.push(`${k} = ${v}`)
}
msg.push('')
diff --git a/deps/npm/lib/commands/deprecate.js b/deps/npm/lib/commands/deprecate.js
index 844d5f60a02abe..ada2bac40f2fd6 100644
--- a/deps/npm/lib/commands/deprecate.js
+++ b/deps/npm/lib/commands/deprecate.js
@@ -17,13 +17,13 @@ class Deprecate extends BaseCommand {
static ignoreImplicitWorkspace = false
- async completion (opts) {
+ static async completion (opts, npm) {
if (opts.conf.argv.remain.length > 1) {
return []
}
- const username = await getIdentity(this.npm, this.npm.flatOptions)
- const packages = await libaccess.getPackages(username, this.npm.flatOptions)
+ const username = await getIdentity(npm, npm.flatOptions)
+ const packages = await libaccess.getPackages(username, npm.flatOptions)
return Object.keys(packages)
.filter((name) =>
packages[name] === 'write' &&
diff --git a/deps/npm/lib/commands/diff.js b/deps/npm/lib/commands/diff.js
index 3924166af0a886..64d81d525d79d2 100644
--- a/deps/npm/lib/commands/diff.js
+++ b/deps/npm/lib/commands/diff.js
@@ -5,7 +5,7 @@ const npa = require('npm-package-arg')
const pacote = require('pacote')
const pickManifest = require('npm-pick-manifest')
const log = require('../utils/log-shim')
-const readPackage = require('read-package-json-fast')
+const pkgJson = require('@npmcli/package-json')
const BaseCommand = require('../base-command.js')
class Diff extends BaseCommand {
@@ -81,7 +81,7 @@ class Diff extends BaseCommand {
async packageName (path) {
let name
try {
- const pkg = await readPackage(resolve(this.prefix, 'package.json'))
+ const { content: pkg } = await pkgJson.normalize(this.prefix)
name = pkg.name
} catch (e) {
log.verbose('diff', 'could not read project dir package.json')
@@ -115,7 +115,7 @@ class Diff extends BaseCommand {
let noPackageJson
let pkgName
try {
- const pkg = await readPackage(resolve(this.prefix, 'package.json'))
+ const { content: pkg } = await pkgJson.normalize(this.prefix)
pkgName = pkg.name
} catch (e) {
log.verbose('diff', 'could not read project dir package.json')
@@ -228,7 +228,7 @@ class Diff extends BaseCommand {
if (semverA && semverB) {
let pkgName
try {
- const pkg = await readPackage(resolve(this.prefix, 'package.json'))
+ const { content: pkg } = await pkgJson.normalize(this.prefix)
pkgName = pkg.name
} catch (e) {
log.verbose('diff', 'could not read project dir package.json')
diff --git a/deps/npm/lib/commands/dist-tag.js b/deps/npm/lib/commands/dist-tag.js
index bc61a4691e55a0..15f9622d14906c 100644
--- a/deps/npm/lib/commands/dist-tag.js
+++ b/deps/npm/lib/commands/dist-tag.js
@@ -1,10 +1,9 @@
const npa = require('npm-package-arg')
-const path = require('path')
const regFetch = require('npm-registry-fetch')
const semver = require('semver')
const log = require('../utils/log-shim')
const otplease = require('../utils/otplease.js')
-const readPackage = require('read-package-json-fast')
+const pkgJson = require('@npmcli/package-json')
const BaseCommand = require('../base-command.js')
class DistTag extends BaseCommand {
@@ -20,7 +19,7 @@ class DistTag extends BaseCommand {
static workspaces = true
static ignoreImplicitWorkspace = false
- async completion (opts) {
+ static async completion (opts) {
const argv = opts.conf.argv.remain
if (argv.length === 2) {
return ['add', 'rm', 'ls']
@@ -152,7 +151,7 @@ class DistTag extends BaseCommand {
if (this.npm.global) {
throw this.usageError()
}
- const { name } = await readPackage(path.resolve(this.npm.prefix, 'package.json'))
+ const { content: { name } } = await pkgJson.normalize(this.npm.prefix)
if (!name) {
throw this.usageError()
}
diff --git a/deps/npm/lib/commands/edit.js b/deps/npm/lib/commands/edit.js
index a671a5d6bad5d6..fbc7840a39876f 100644
--- a/deps/npm/lib/commands/edit.js
+++ b/deps/npm/lib/commands/edit.js
@@ -38,8 +38,8 @@ class Edit extends BaseCommand {
// TODO
/* istanbul ignore next */
- async completion (opts) {
- return completion(this.npm, opts)
+ static async completion (opts, npm) {
+ return completion(npm, opts)
}
async exec (args) {
diff --git a/deps/npm/lib/commands/exec.js b/deps/npm/lib/commands/exec.js
index a5235c7845851b..ed4b07dc39b4d2 100644
--- a/deps/npm/lib/commands/exec.js
+++ b/deps/npm/lib/commands/exec.js
@@ -54,6 +54,7 @@ class Exec extends BaseCommand {
localBin,
globalBin,
globalDir,
+ chalk,
} = this.npm
const output = this.npm.output.bind(this.npm)
const scriptShell = this.npm.config.get('script-shell') || undefined
@@ -83,6 +84,7 @@ class Exec extends BaseCommand {
globalBin,
globalPath,
output,
+ chalk,
packages,
path: localPrefix,
runPath,
diff --git a/deps/npm/lib/commands/explain.js b/deps/npm/lib/commands/explain.js
index a72514fb978055..403274db68dfaf 100644
--- a/deps/npm/lib/commands/explain.js
+++ b/deps/npm/lib/commands/explain.js
@@ -18,9 +18,9 @@ class Explain extends ArboristWorkspaceCmd {
// TODO
/* istanbul ignore next */
- async completion (opts) {
+ static async completion (opts, npm) {
const completion = require('../utils/completion/installed-deep.js')
- return completion(this.npm, opts)
+ return completion(npm, opts)
}
async exec (args) {
diff --git a/deps/npm/lib/commands/explore.js b/deps/npm/lib/commands/explore.js
index 0d915cb4c69583..7a03ea4eabd7f6 100644
--- a/deps/npm/lib/commands/explore.js
+++ b/deps/npm/lib/commands/explore.js
@@ -1,9 +1,9 @@
// npm explore [@]
// open a subshell to the package folder.
-const rpj = require('read-package-json-fast')
+const pkgJson = require('@npmcli/package-json')
const runScript = require('@npmcli/run-script')
-const { join, resolve, relative } = require('path')
+const { join, relative } = require('path')
const log = require('../utils/log-shim.js')
const completion = require('../utils/completion/installed-shallow.js')
const BaseCommand = require('../base-command.js')
@@ -17,8 +17,8 @@ class Explore extends BaseCommand {
// TODO
/* istanbul ignore next */
- async completion (opts) {
- return completion(this.npm, opts)
+ static async completion (opts, npm) {
+ return completion(npm, opts)
}
async exec (args) {
@@ -38,7 +38,7 @@ class Explore extends BaseCommand {
// the set of arguments, or the shell config, and let @npmcli/run-script
// handle all the escaping and PATH setup stuff.
- const pkg = await rpj(resolve(path, 'package.json')).catch(er => {
+ const { content: pkg } = await pkgJson.normalize(path).catch(er => {
log.error('explore', `It doesn't look like ${pkgname} is installed.`)
throw er
})
diff --git a/deps/npm/lib/commands/fund.js b/deps/npm/lib/commands/fund.js
index 1e8981967fc327..2804d36cd56034 100644
--- a/deps/npm/lib/commands/fund.js
+++ b/deps/npm/lib/commands/fund.js
@@ -36,9 +36,9 @@ class Fund extends ArboristWorkspaceCmd {
// TODO
/* istanbul ignore next */
- async completion (opts) {
+ static async completion (opts, npm) {
const completion = require('../utils/completion/installed-deep.js')
- return completion(this.npm, opts)
+ return completion(npm, opts)
}
async exec (args) {
diff --git a/deps/npm/lib/commands/get.js b/deps/npm/lib/commands/get.js
index 5e92e85a66382f..4bf5d2caf82645 100644
--- a/deps/npm/lib/commands/get.js
+++ b/deps/npm/lib/commands/get.js
@@ -1,16 +1,18 @@
+const Npm = require('../npm.js')
const BaseCommand = require('../base-command.js')
class Get extends BaseCommand {
static description = 'Get a value from the npm configuration'
static name = 'get'
static usage = ['[ ...] (See `npm config`)']
+ static params = ['long']
static ignoreImplicitWorkspace = false
// TODO
/* istanbul ignore next */
- async completion (opts) {
- const config = await this.npm.cmd('config')
- return config.completion(opts)
+ static async completion (opts) {
+ const Config = Npm.cmd('config')
+ return Config.completion(opts)
}
async exec (args) {
diff --git a/deps/npm/lib/commands/help.js b/deps/npm/lib/commands/help.js
index 4b40ef37afa2de..39c580f9a68715 100644
--- a/deps/npm/lib/commands/help.js
+++ b/deps/npm/lib/commands/help.js
@@ -3,6 +3,7 @@ const path = require('path')
const openUrl = require('../utils/open-url.js')
const { glob } = require('glob')
const localeCompare = require('@isaacs/string-locale-compare')('en')
+const { deref } = require('../utils/cmd-list.js')
const globify = pattern => pattern.split('\\').join('/')
const BaseCommand = require('../base-command.js')
@@ -26,11 +27,11 @@ class Help extends BaseCommand {
static usage = [' []']
static params = ['viewer']
- async completion (opts) {
+ static async completion (opts, npm) {
if (opts.conf.argv.remain.length > 2) {
return []
}
- const g = path.resolve(this.npm.npmRoot, 'man/man[0-9]/*.[0-9]')
+ const g = path.resolve(npm.npmRoot, 'man/man[0-9]/*.[0-9]')
let files = await glob(globify(g))
// preserve glob@8 behavior
files = files.sort((a, b) => a.localeCompare(b, 'en'))
@@ -49,7 +50,7 @@ class Help extends BaseCommand {
const manSearch = /^\d+$/.test(args[0]) ? `man${args.shift()}` : 'man*'
if (!args.length) {
- return this.npm.output(await this.npm.usage)
+ return this.npm.output(this.npm.usage)
}
// npm help foo bar baz: search topics
@@ -58,7 +59,7 @@ class Help extends BaseCommand {
}
// `npm help package.json`
- const arg = (this.npm.deref(args[0]) || args[0]).replace('.json', '-json')
+ const arg = (deref(args[0]) || args[0]).replace('.json', '-json')
// find either section.n or npm-section.n
const f = globify(path.resolve(this.npm.npmRoot, `man/${manSearch}/?(npm-)${arg}.[0-9]*`))
diff --git a/deps/npm/lib/commands/init.js b/deps/npm/lib/commands/init.js
index 1e5661a7840f26..539fba885deef3 100644
--- a/deps/npm/lib/commands/init.js
+++ b/deps/npm/lib/commands/init.js
@@ -3,7 +3,6 @@ const { relative, resolve } = require('path')
const { mkdir } = require('fs/promises')
const initJson = require('init-package-json')
const npa = require('npm-package-arg')
-const rpj = require('read-package-json-fast')
const libexec = require('libnpmexec')
const mapWorkspaces = require('@npmcli/map-workspaces')
const PackageJson = require('@npmcli/package-json')
@@ -54,7 +53,7 @@ class Init extends BaseCommand {
// reads package.json for the top-level folder first, by doing this we
// ensure the command throw if no package.json is found before trying
// to create a workspace package.json file or its folders
- const pkg = await rpj(resolve(this.npm.localPrefix, 'package.json')).catch((err) => {
+ const { content: pkg } = await PackageJson.normalize(this.npm.localPrefix).catch(err => {
if (err.code === 'ENOENT') {
log.warn('Missing package.json. Try with `--include-workspace-root`.')
}
@@ -120,11 +119,11 @@ class Init extends BaseCommand {
}
const newArgs = [packageName, ...otherArgs]
- const { color } = this.npm.flatOptions
const {
flatOptions,
localBin,
globalBin,
+ chalk,
} = this.npm
const output = this.npm.output.bind(this.npm)
const runPath = path
@@ -134,10 +133,10 @@ class Init extends BaseCommand {
await libexec({
...flatOptions,
args: newArgs,
- color,
localBin,
globalBin,
output,
+ chalk,
path,
runPath,
scriptShell,
@@ -217,7 +216,7 @@ class Init extends BaseCommand {
// translate workspaces paths into an array containing workspaces names
const workspaces = []
for (const path of workspacesPaths) {
- const { name } = await rpj(resolve(path, 'package.json')).catch(() => ({}))
+ const { content: { name } } = await PackageJson.normalize(path).catch(() => ({ content: {} }))
if (name) {
workspaces.push(name)
diff --git a/deps/npm/lib/commands/install.js b/deps/npm/lib/commands/install.js
index 99f5b326f7b2bf..2bfd20a72658f7 100644
--- a/deps/npm/lib/commands/install.js
+++ b/deps/npm/lib/commands/install.js
@@ -25,6 +25,7 @@ class Install extends ArboristWorkspaceCmd {
'global-style',
'omit',
'strict-peer-deps',
+ 'prefer-dedupe',
'package-lock',
'foreground-scripts',
'ignore-scripts',
@@ -37,7 +38,7 @@ class Install extends ArboristWorkspaceCmd {
static usage = ['[ ...]']
- async completion (opts) {
+ static async completion (opts) {
const { partialWord } = opts
// install can complete to a folder with a package.json, or any package.
// if it has a slash, then it's gotta be a folder
diff --git a/deps/npm/lib/commands/link.js b/deps/npm/lib/commands/link.js
index 61ae5a98088684..a81450a247ed64 100644
--- a/deps/npm/lib/commands/link.js
+++ b/deps/npm/lib/commands/link.js
@@ -4,7 +4,7 @@ const readdir = util.promisify(fs.readdir)
const { resolve } = require('path')
const npa = require('npm-package-arg')
-const rpj = require('read-package-json-fast')
+const pkgJson = require('@npmcli/package-json')
const semver = require('semver')
const reifyFinish = require('../utils/reify-finish.js')
@@ -35,8 +35,8 @@ class Link extends ArboristWorkspaceCmd {
...super.params,
]
- async completion (opts) {
- const dir = this.npm.globalDir
+ static async completion (opts, npm) {
+ const dir = npm.globalDir
const files = await readdir(dir)
return files.filter(f => !/^[._-]/.test(f))
}
@@ -96,11 +96,12 @@ class Link extends ArboristWorkspaceCmd {
const names = []
for (const a of args) {
const arg = npa(a)
- names.push(
- arg.type === 'directory'
- ? (await rpj(resolve(arg.fetchSpec, 'package.json'))).name
- : arg.name
- )
+ if (arg.type === 'directory') {
+ const { content } = await pkgJson.normalize(arg.fetchSpec)
+ names.push(content.name)
+ } else {
+ names.push(arg.name)
+ }
}
// npm link should not save=true by default unless you're
diff --git a/deps/npm/lib/commands/ls.js b/deps/npm/lib/commands/ls.js
index eb9114802d5e0a..92300b1c404a35 100644
--- a/deps/npm/lib/commands/ls.js
+++ b/deps/npm/lib/commands/ls.js
@@ -40,9 +40,9 @@ class LS extends ArboristWorkspaceCmd {
// TODO
/* istanbul ignore next */
- async completion (opts) {
+ static async completion (opts, npm) {
const completion = require('../utils/completion/installed-deep.js')
- return completion(this.npm, opts)
+ return completion(npm, opts)
}
async exec (args) {
diff --git a/deps/npm/lib/commands/org.js b/deps/npm/lib/commands/org.js
index 575ff75e2a6cf3..1f32d41ff73068 100644
--- a/deps/npm/lib/commands/org.js
+++ b/deps/npm/lib/commands/org.js
@@ -14,7 +14,7 @@ class Org extends BaseCommand {
static params = ['registry', 'otp', 'json', 'parseable']
- async completion (opts) {
+ static async completion (opts) {
const argv = opts.conf.argv.remain
if (argv.length === 2) {
return ['set', 'rm', 'ls']
diff --git a/deps/npm/lib/commands/owner.js b/deps/npm/lib/commands/owner.js
index 3a997db800db7d..5b54dd41f3d607 100644
--- a/deps/npm/lib/commands/owner.js
+++ b/deps/npm/lib/commands/owner.js
@@ -3,14 +3,13 @@ const npmFetch = require('npm-registry-fetch')
const pacote = require('pacote')
const log = require('../utils/log-shim')
const otplease = require('../utils/otplease.js')
-const readPackageJsonFast = require('read-package-json-fast')
+const pkgJson = require('@npmcli/package-json')
const BaseCommand = require('../base-command.js')
-const { resolve } = require('path')
-const readJson = async (pkg) => {
+const readJson = async (path) => {
try {
- const json = await readPackageJsonFast(pkg)
- return json
+ const { content } = await pkgJson.normalize(path)
+ return content
} catch {
return {}
}
@@ -35,7 +34,7 @@ class Owner extends BaseCommand {
static workspaces = true
static ignoreImplicitWorkspace = false
- async completion (opts) {
+ static async completion (opts, npm) {
const argv = opts.conf.argv.remain
if (argv.length > 3) {
return []
@@ -51,17 +50,17 @@ class Owner extends BaseCommand {
// reaches registry in order to autocomplete rm
if (argv[2] === 'rm') {
- if (this.npm.global) {
+ if (npm.global) {
return []
}
- const { name } = await readJson(resolve(this.npm.prefix, 'package.json'))
+ const { name } = await readJson(npm.prefix)
if (!name) {
return []
}
const spec = npa(name)
const data = await pacote.packument(spec, {
- ...this.npm.flatOptions,
+ ...npm.flatOptions,
fullMetadata: true,
})
if (data && data.maintainers && data.maintainers.length) {
@@ -130,7 +129,7 @@ class Owner extends BaseCommand {
if (this.npm.global) {
throw this.usageError()
}
- const { name } = await readJson(resolve(prefix, 'package.json'))
+ const { name } = await readJson(prefix)
if (!name) {
throw this.usageError()
}
diff --git a/deps/npm/lib/commands/profile.js b/deps/npm/lib/commands/profile.js
index 4fba1209e03350..a7d4ac2f29fbe7 100644
--- a/deps/npm/lib/commands/profile.js
+++ b/deps/npm/lib/commands/profile.js
@@ -53,7 +53,7 @@ class Profile extends BaseCommand {
'otp',
]
- async completion (opts) {
+ static async completion (opts) {
var argv = opts.conf.argv.remain
if (!argv[2]) {
diff --git a/deps/npm/lib/commands/publish.js b/deps/npm/lib/commands/publish.js
index 8befbc5ca34cec..54707278f96918 100644
--- a/deps/npm/lib/commands/publish.js
+++ b/deps/npm/lib/commands/publish.js
@@ -1,4 +1,3 @@
-const util = require('util')
const log = require('../utils/log-shim.js')
const semver = require('semver')
const pack = require('libnpmpack')
@@ -17,11 +16,7 @@ const { getContents, logTar } = require('../utils/tar.js')
// revisit this at some point, and have a minimal set that's a SemVer-major
// change that ought to get a RFC written on it.
const { flatten } = require('../utils/config/index.js')
-
-// this is the only case in the CLI where we want to use the old full slow
-// 'read-package-json' module, because we want to pull in all the defaults and
-// metadata, like git sha's and default scripts and all that.
-const readJson = util.promisify(require('read-package-json'))
+const pkgJson = require('@npmcli/package-json')
const BaseCommand = require('../base-command.js')
class Publish extends BaseCommand {
@@ -204,7 +199,9 @@ class Publish extends BaseCommand {
async getManifest (spec, opts) {
let manifest
if (spec.type === 'directory') {
- manifest = await readJson(`${spec.fetchSpec}/package.json`)
+ // Prepare is the special function for publishing, different than normalize
+ const { content } = await pkgJson.prepare(spec.fetchSpec)
+ manifest = content
} else {
manifest = await pacote.manifest(spec, {
...opts,
diff --git a/deps/npm/lib/commands/rebuild.js b/deps/npm/lib/commands/rebuild.js
index 527447e4279175..8af96f725555cb 100644
--- a/deps/npm/lib/commands/rebuild.js
+++ b/deps/npm/lib/commands/rebuild.js
@@ -18,9 +18,9 @@ class Rebuild extends ArboristWorkspaceCmd {
// TODO
/* istanbul ignore next */
- async completion (opts) {
+ static async completion (opts, npm) {
const completion = require('../utils/completion/installed-deep.js')
- return completion(this.npm, opts)
+ return completion(npm, opts)
}
async exec (args) {
diff --git a/deps/npm/lib/commands/run-script.js b/deps/npm/lib/commands/run-script.js
index e1bce0e52a5132..13efdde750a825 100644
--- a/deps/npm/lib/commands/run-script.js
+++ b/deps/npm/lib/commands/run-script.js
@@ -1,7 +1,6 @@
-const { resolve } = require('path')
const runScript = require('@npmcli/run-script')
const { isServerPackage } = runScript
-const rpj = require('read-package-json-fast')
+const pkgJson = require('@npmcli/package-json')
const log = require('../utils/log-shim.js')
const didYouMean = require('../utils/did-you-mean.js')
const { isWindowsShell } = require('../utils/is-windows.js')
@@ -36,12 +35,11 @@ class RunScript extends BaseCommand {
static ignoreImplicitWorkspace = false
static isShellout = true
- async completion (opts) {
+ static async completion (opts, npm) {
const argv = opts.conf.argv.remain
if (argv.length === 2) {
- // find the script name
- const json = resolve(this.npm.localPrefix, 'package.json')
- const { scripts = {} } = await rpj(json).catch(er => ({}))
+ const { content: { scripts = {} } } = await pkgJson.normalize(npm.localPrefix)
+ .catch(er => ({ content: {} }))
if (opts.isFish) {
return Object.keys(scripts).map(s => `${s}\t${scripts[s].slice(0, 30)}`)
}
@@ -70,7 +68,10 @@ class RunScript extends BaseCommand {
// null value
const scriptShell = this.npm.config.get('script-shell') || undefined
- pkg = pkg || (await rpj(`${path}/package.json`))
+ if (!pkg) {
+ const { content } = await pkgJson.normalize(path)
+ pkg = content
+ }
const { scripts = {} } = pkg
if (event === 'restart' && !scripts.restart) {
@@ -89,7 +90,7 @@ class RunScript extends BaseCommand {
return
}
- const suggestions = await didYouMean(this.npm, path, event)
+ const suggestions = await didYouMean(path, event)
throw new Error(
`Missing script: "${event}"${suggestions}\n\nTo see a list of scripts, run:\n npm run`
)
@@ -126,8 +127,8 @@ class RunScript extends BaseCommand {
}
async list (args, path) {
- path = path || this.npm.localPrefix
- const { scripts, name, _id } = await rpj(`${path}/package.json`)
+ /* eslint-disable-next-line max-len */
+ const { content: { scripts, name, _id } } = await pkgJson.normalize(path || this.npm.localPrefix)
const pkgid = _id || name
if (!scripts) {
@@ -197,7 +198,7 @@ class RunScript extends BaseCommand {
await this.setWorkspaces()
for (const workspacePath of this.workspacePaths) {
- const pkg = await rpj(`${workspacePath}/package.json`)
+ const { content: pkg } = await pkgJson.normalize(workspacePath)
const runResult = await this.run(args, {
path: workspacePath,
pkg,
@@ -236,7 +237,7 @@ class RunScript extends BaseCommand {
if (this.npm.config.get('json')) {
const res = {}
for (const workspacePath of this.workspacePaths) {
- const { scripts, name } = await rpj(`${workspacePath}/package.json`)
+ const { content: { scripts, name } } = await pkgJson.normalize(workspacePath)
res[name] = { ...scripts }
}
this.npm.output(JSON.stringify(res, null, 2))
@@ -245,7 +246,7 @@ class RunScript extends BaseCommand {
if (this.npm.config.get('parseable')) {
for (const workspacePath of this.workspacePaths) {
- const { scripts, name } = await rpj(`${workspacePath}/package.json`)
+ const { content: { scripts, name } } = await pkgJson.normalize(workspacePath)
for (const [script, cmd] of Object.entries(scripts || {})) {
this.npm.output(`${name}:${script}:${cmd}`)
}
diff --git a/deps/npm/lib/commands/set.js b/deps/npm/lib/commands/set.js
index b650026a599a96..f315d183845c5e 100644
--- a/deps/npm/lib/commands/set.js
+++ b/deps/npm/lib/commands/set.js
@@ -1,15 +1,18 @@
+const Npm = require('../npm.js')
const BaseCommand = require('../base-command.js')
class Set extends BaseCommand {
static description = 'Set a value in the npm configuration'
static name = 'set'
static usage = ['= [= ...] (See `npm config`)']
+ static params = ['global', 'location']
static ignoreImplicitWorkspace = false
// TODO
/* istanbul ignore next */
- async completion (opts) {
- return this.npm.cmd('config').completion(opts)
+ static async completion (opts) {
+ const Config = Npm.cmd('config')
+ return Config.completion(opts)
}
async exec (args) {
diff --git a/deps/npm/lib/commands/team.js b/deps/npm/lib/commands/team.js
index 2d4fc663715e4e..3c6cf305a6e5f9 100644
--- a/deps/npm/lib/commands/team.js
+++ b/deps/npm/lib/commands/team.js
@@ -24,7 +24,7 @@ class Team extends BaseCommand {
static ignoreImplicitWorkspace = false
- async completion (opts) {
+ static async completion (opts) {
const { conf: { argv: { remain: argv } } } = opts
const subcommands = ['create', 'destroy', 'add', 'rm', 'ls']
diff --git a/deps/npm/lib/commands/token.js b/deps/npm/lib/commands/token.js
index bc2e4f3796364d..c24684b3dd6143 100644
--- a/deps/npm/lib/commands/token.js
+++ b/deps/npm/lib/commands/token.js
@@ -14,7 +14,7 @@ class Token extends BaseCommand {
static usage = ['list', 'revoke ', 'create [--read-only] [--cidr=list]']
static params = ['read-only', 'cidr', 'registry', 'otp']
- async completion (opts) {
+ static async completion (opts) {
const argv = opts.conf.argv.remain
const subcommands = ['list', 'revoke', 'create']
if (argv.length === 2) {
diff --git a/deps/npm/lib/commands/uninstall.js b/deps/npm/lib/commands/uninstall.js
index e5373119ec757a..07775efb9cf2f1 100644
--- a/deps/npm/lib/commands/uninstall.js
+++ b/deps/npm/lib/commands/uninstall.js
@@ -1,5 +1,5 @@
const { resolve } = require('path')
-const rpj = require('read-package-json-fast')
+const pkgJson = require('@npmcli/package-json')
const reifyFinish = require('../utils/reify-finish.js')
const completion = require('../utils/completion/installed-shallow.js')
@@ -8,14 +8,14 @@ const ArboristWorkspaceCmd = require('../arborist-cmd.js')
class Uninstall extends ArboristWorkspaceCmd {
static description = 'Remove a package'
static name = 'uninstall'
- static params = ['save', ...super.params]
+ static params = ['save', 'global', ...super.params]
static usage = ['[<@scope>/]...']
static ignoreImplicitWorkspace = false
// TODO
/* istanbul ignore next */
- async completion (opts) {
- return completion(this.npm, opts)
+ static async completion (opts, npm) {
+ return completion(npm, opts)
}
async exec (args) {
@@ -24,7 +24,7 @@ class Uninstall extends ArboristWorkspaceCmd {
throw new Error('Must provide a package name to remove')
} else {
try {
- const pkg = await rpj(resolve(this.npm.localPrefix, 'package.json'))
+ const { content: pkg } = await pkgJson.normalize(this.npm.localPrefix)
args.push(pkg.name)
} catch (er) {
if (er.code !== 'ENOENT' && er.code !== 'ENOTDIR') {
diff --git a/deps/npm/lib/commands/unpublish.js b/deps/npm/lib/commands/unpublish.js
index f1bcded192e5ad..66985297b9574b 100644
--- a/deps/npm/lib/commands/unpublish.js
+++ b/deps/npm/lib/commands/unpublish.js
@@ -2,9 +2,7 @@ const libaccess = require('libnpmaccess')
const libunpub = require('libnpmpublish').unpublish
const npa = require('npm-package-arg')
const npmFetch = require('npm-registry-fetch')
-const path = require('path')
-const util = require('util')
-const readJson = util.promisify(require('read-package-json'))
+const pkgJson = require('@npmcli/package-json')
const { flatten } = require('../utils/config/index.js')
const getIdentity = require('../utils/get-identity.js')
@@ -24,7 +22,7 @@ class Unpublish extends BaseCommand {
static workspaces = true
static ignoreImplicitWorkspace = false
- async getKeysOfVersions (name, opts) {
+ static async getKeysOfVersions (name, opts) {
const pkgUri = npa(name).escapedName
const json = await npmFetch.json(`${pkgUri}?write=true`, {
...opts,
@@ -33,15 +31,15 @@ class Unpublish extends BaseCommand {
return Object.keys(json.versions)
}
- async completion (args) {
+ static async completion (args, npm) {
const { partialWord, conf } = args
if (conf.argv.remain.length >= 3) {
return []
}
- const opts = { ...this.npm.flatOptions }
- const username = await getIdentity(this.npm, { ...opts }).catch(() => null)
+ const opts = { ...npm.flatOptions }
+ const username = await getIdentity(npm, { ...opts }).catch(() => null)
if (!username) {
return []
}
@@ -96,8 +94,8 @@ class Unpublish extends BaseCommand {
let manifest
let manifestErr
try {
- const pkgJson = path.join(this.npm.localPrefix, 'package.json')
- manifest = await readJson(pkgJson)
+ const { content } = await pkgJson.prepare(this.npm.localPrefix)
+ manifest = content
} catch (err) {
manifestErr = err
}
@@ -107,7 +105,7 @@ class Unpublish extends BaseCommand {
if (manifest && manifest.name === spec.name && manifest.publishConfig) {
flatten(manifest.publishConfig, opts)
}
- const versions = await this.getKeysOfVersions(spec.name, opts)
+ const versions = await Unpublish.getKeysOfVersions(spec.name, opts)
if (versions.length === 1 && !force) {
throw this.usageError(LAST_REMAINING_VERSION_ERROR)
}
diff --git a/deps/npm/lib/commands/update.js b/deps/npm/lib/commands/update.js
index 26be5ad681983b..caa69dd317ca6b 100644
--- a/deps/npm/lib/commands/update.js
+++ b/deps/npm/lib/commands/update.js
@@ -31,9 +31,9 @@ class Update extends ArboristWorkspaceCmd {
// TODO
/* istanbul ignore next */
- async completion (opts) {
+ static async completion (opts, npm) {
const completion = require('../utils/completion/installed-deep.js')
- return completion(this.npm, opts)
+ return completion(npm, opts)
}
async exec (args) {
diff --git a/deps/npm/lib/commands/version.js b/deps/npm/lib/commands/version.js
index a5232836717917..029a6fdd3101e4 100644
--- a/deps/npm/lib/commands/version.js
+++ b/deps/npm/lib/commands/version.js
@@ -28,7 +28,7 @@ class Version extends BaseCommand {
/* eslint-disable-next-line max-len */
static usage = ['[ | major | minor | patch | premajor | preminor | prepatch | prerelease | from-git]']
- async completion (opts) {
+ static async completion (opts) {
const {
conf: {
argv: { remain },
diff --git a/deps/npm/lib/commands/view.js b/deps/npm/lib/commands/view.js
index bbe7dcdd18bbf7..f118184124db97 100644
--- a/deps/npm/lib/commands/view.js
+++ b/deps/npm/lib/commands/view.js
@@ -29,7 +29,7 @@ class View extends BaseCommand {
static ignoreImplicitWorkspace = false
static usage = ['[] [[.subfield]...]']
- async completion (opts) {
+ static async completion (opts, npm) {
if (opts.conf.argv.remain.length <= 2) {
// There used to be registry completion here, but it stopped
// making sense somewhere around 50,000 packages on the registry
@@ -37,13 +37,13 @@ class View extends BaseCommand {
}
// have the package, get the fields
const config = {
- ...this.npm.flatOptions,
+ ...npm.flatOptions,
fullMetadata: true,
preferOnline: true,
}
const spec = npa(opts.conf.argv.remain[2])
const pckmnt = await packument(spec, config)
- const defaultTag = this.npm.config.get('tag')
+ const defaultTag = npm.config.get('tag')
const dv = pckmnt.versions[pckmnt['dist-tags'][defaultTag]]
pckmnt.versions = Object.keys(pckmnt.versions).sort(semver.compareLoose)
diff --git a/deps/npm/lib/es6/validate-engines.js b/deps/npm/lib/es6/validate-engines.js
new file mode 100644
index 00000000000000..0eaa549fc3702d
--- /dev/null
+++ b/deps/npm/lib/es6/validate-engines.js
@@ -0,0 +1,49 @@
+// This is separate to indicate that it should contain code we expect to work in
+// all versions of node >= 6. This is a best effort to catch syntax errors to
+// give users a good error message if they are using a node version that doesn't
+// allow syntax we are using such as private properties, etc. This file is
+// linted with ecmaVersion=6 so we don't use invalid syntax, which is set in the
+// .eslintrc.local.json file
+
+const { engines: { node: engines }, version } = require('../../package.json')
+const npm = `v${version}`
+
+module.exports = (process, getCli) => {
+ const node = process.version.replace(/-.*$/, '')
+
+ /* eslint-disable-next-line max-len */
+ const unsupportedMessage = `npm ${npm} does not support Node.js ${node}. This version of npm supports the following node versions: \`${engines}\`. You can find the latest version at https://nodejs.org/.`
+
+ /* eslint-disable-next-line max-len */
+ const brokenMessage = `ERROR: npm ${npm} is known not to run on Node.js ${node}. This version of npm supports the following node versions: \`${engines}\`. You can find the latest version at https://nodejs.org/.`
+
+ // coverage ignored because this is only hit in very unsupported node versions
+ // and it's a best effort attempt to show something nice in those cases
+ /* istanbul ignore next */
+ const syntaxErrorHandler = (err) => {
+ if (err instanceof SyntaxError) {
+ // eslint-disable-next-line no-console
+ console.error(`${brokenMessage}\n\nERROR:`)
+ // eslint-disable-next-line no-console
+ console.error(err)
+ return process.exit(1)
+ }
+ throw err
+ }
+
+ process.on('uncaughtException', syntaxErrorHandler)
+ process.on('unhandledRejection', syntaxErrorHandler)
+
+ // require this only after setting up the error handlers
+ const cli = getCli()
+ return cli(process, {
+ node,
+ npm,
+ engines,
+ unsupportedMessage,
+ off: () => {
+ process.off('uncaughtException', syntaxErrorHandler)
+ process.off('unhandledRejection', syntaxErrorHandler)
+ },
+ })
+}
diff --git a/deps/npm/lib/npm.js b/deps/npm/lib/npm.js
index eebb453dbbacb1..f08ef32c180d91 100644
--- a/deps/npm/lib/npm.js
+++ b/deps/npm/lib/npm.js
@@ -1,10 +1,7 @@
-const EventEmitter = require('events')
const { resolve, dirname, join } = require('path')
const Config = require('@npmcli/config')
-const chalk = require('chalk')
const which = require('which')
const fs = require('fs/promises')
-const abbrev = require('abbrev')
// Patch the global fs module here at the app level
require('graceful-fs').gracefulify(require('fs'))
@@ -18,13 +15,23 @@ const log = require('./utils/log-shim')
const replaceInfo = require('./utils/replace-info.js')
const updateNotifier = require('./utils/update-notifier.js')
const pkg = require('../package.json')
-const { commands, aliases } = require('./utils/cmd-list.js')
+const { deref } = require('./utils/cmd-list.js')
-class Npm extends EventEmitter {
+class Npm {
static get version () {
return pkg.version
}
+ static cmd (c) {
+ const command = deref(c)
+ if (!command) {
+ throw Object.assign(new Error(`Unknown command ${c}`), {
+ code: 'EUNKNOWNCOMMAND',
+ })
+ }
+ return require(`./commands/${command}.js`)
+ }
+
updateNotification = null
loadErr = null
argv = []
@@ -32,15 +39,15 @@ class Npm extends EventEmitter {
#command = null
#runId = new Date().toISOString().replace(/[.:]/g, '_')
#loadPromise = null
- #tmpFolder = null
#title = 'npm'
#argvClean = []
- #chalk = null
- #logChalk = null
- #noColorChalk = new chalk.Instance({ level: 0 })
#npmRoot = null
#warnedNonDashArg = false
+ #chalk = null
+ #logChalk = null
+ #noColorChalk = null
+
#outputBuffer = []
#logFile = new LogFile()
#display = new Display()
@@ -66,7 +73,6 @@ class Npm extends EventEmitter {
// prefix to `npmRoot` since that is the first dir it would encounter when
// doing implicit detection
constructor ({ npmRoot = dirname(__dirname), argv = [], excludeNpmCwd = false } = {}) {
- super()
this.#npmRoot = npmRoot
this.config = new Config({
npmPath: this.#npmRoot,
@@ -82,53 +88,9 @@ class Npm extends EventEmitter {
return this.constructor.version
}
- deref (c) {
- if (!c) {
- return
- }
-
- // Translate camelCase to snake-case (i.e. installTest to install-test)
- if (c.match(/[A-Z]/)) {
- c = c.replace(/([A-Z])/g, m => '-' + m.toLowerCase())
- }
-
- // if they asked for something exactly we are done
- if (commands.includes(c)) {
- return c
- }
-
- // if they asked for a direct alias
- if (aliases[c]) {
- return aliases[c]
- }
-
- const abbrevs = abbrev(commands.concat(Object.keys(aliases)))
-
- // first deref the abbrev, if there is one
- // then resolve any aliases
- // so `npm install-cl` will resolve to `install-clean` then to `ci`
- let a = abbrevs[c]
- while (aliases[a]) {
- a = aliases[a]
- }
- return a
- }
-
- // Get an instantiated npm command
- // npm.command is already taken as the currently running command, a refactor
- // would be needed to change this
- async cmd (cmd) {
- await this.load()
-
- const cmdId = this.deref(cmd)
- if (!cmdId) {
- throw Object.assign(new Error(`Unknown command ${cmd}`), {
- code: 'EUNKNOWNCOMMAND',
- })
- }
-
- const Impl = require(`./commands/${cmdId}.js`)
- const command = new Impl(this)
+ setCmd (cmd) {
+ const Command = Npm.cmd(cmd)
+ const command = new Command(this)
// since 'test', 'start', 'stop', etc. commands re-enter this function
// to call the run-script command, we need to only set it one time.
@@ -141,8 +103,14 @@ class Npm extends EventEmitter {
}
// Call an npm command
+ // TODO: tests are currently the only time the second
+ // parameter of args is used. When called via `lib/cli.js` the config is
+ // loaded and this.argv is set to the remaining command line args. We should
+ // consider testing the CLI the same way it is used and not allow args to be
+ // passed in directly.
async exec (cmd, args = this.argv) {
- const command = await this.cmd(cmd)
+ const command = this.setCmd(cmd)
+
const timeEnd = this.time(`command:${cmd}`)
// this is async but we dont await it, since its ok if it doesnt
@@ -226,6 +194,13 @@ class Npm extends EventEmitter {
await this.time('npm:load:configload', () => this.config.load())
+ const { Chalk, supportsColor, supportsColorStderr } = await import('chalk')
+ this.#noColorChalk = new Chalk({ level: 0 })
+ this.#chalk = this.color ? new Chalk({ level: supportsColor.level })
+ : this.#noColorChalk
+ this.#logChalk = this.logColor ? new Chalk({ level: supportsColorStderr.level })
+ : this.#noColorChalk
+
// mkdir this separately since the logs dir can be set to
// a different location. if this fails, then we don't have
// a cache dir, but we don't want to fail immediately since
@@ -333,20 +308,10 @@ class Npm extends EventEmitter {
}
get chalk () {
- if (!this.#chalk) {
- this.#chalk = new chalk.Instance({
- level: this.color ? chalk.level : 0,
- })
- }
return this.#chalk
}
get logChalk () {
- if (!this.#logChalk) {
- this.#logChalk = new chalk.Instance({
- level: this.logColor ? chalk.stderr.level : 0,
- })
- }
return this.#logChalk
}
@@ -462,15 +427,6 @@ class Npm extends EventEmitter {
return usage(this)
}
- // XXX add logging to see if we actually use this
- get tmp () {
- if (!this.#tmpFolder) {
- const rand = require('crypto').randomBytes(4).toString('hex')
- this.#tmpFolder = `npm-${process.pid}-${rand}`
- }
- return resolve(this.config.get('tmp'), this.#tmpFolder)
- }
-
// output to stdout in a progress bar compatible way
output (...msg) {
log.clearProgress()
diff --git a/deps/npm/lib/utils/cmd-list.js b/deps/npm/lib/utils/cmd-list.js
index e5479139033d57..9bd252bc3facce 100644
--- a/deps/npm/lib/utils/cmd-list.js
+++ b/deps/npm/lib/utils/cmd-list.js
@@ -1,3 +1,5 @@
+const abbrev = require('abbrev')
+
// These correspond to filenames in lib/commands
// Please keep this list sorted alphabetically
const commands = [
@@ -136,7 +138,40 @@ const aliases = {
'add-user': 'adduser',
}
+const deref = (c) => {
+ if (!c) {
+ return
+ }
+
+ // Translate camelCase to snake-case (i.e. installTest to install-test)
+ if (c.match(/[A-Z]/)) {
+ c = c.replace(/([A-Z])/g, m => '-' + m.toLowerCase())
+ }
+
+ // if they asked for something exactly we are done
+ if (commands.includes(c)) {
+ return c
+ }
+
+ // if they asked for a direct alias
+ if (aliases[c]) {
+ return aliases[c]
+ }
+
+ const abbrevs = abbrev(commands.concat(Object.keys(aliases)))
+
+ // first deref the abbrev, if there is one
+ // then resolve any aliases
+ // so `npm install-cl` will resolve to `install-clean` then to `ci`
+ let a = abbrevs[c]
+ while (aliases[a]) {
+ a = aliases[a]
+ }
+ return a
+}
+
module.exports = {
aliases,
commands,
+ deref,
}
diff --git a/deps/npm/lib/utils/config/definition.js b/deps/npm/lib/utils/config/definition.js
index f88d8334cf01fb..54e522c355b93d 100644
--- a/deps/npm/lib/utils/config/definition.js
+++ b/deps/npm/lib/utils/config/definition.js
@@ -13,6 +13,7 @@ const allowed = [
'defaultDescription',
'deprecated',
'description',
+ 'exclusive',
'flatten',
'hint',
'key',
@@ -83,12 +84,15 @@ class Definition {
This value is not exported to the environment for child processes.
`
const deprecated = !this.deprecated ? '' : `* DEPRECATED: ${unindent(this.deprecated)}\n`
+ /* eslint-disable-next-line max-len */
+ const exclusive = !this.exclusive ? '' : `\nThis config can not be used with: \`${this.exclusive.join('`, `')}\``
return wrapAll(`#### \`${this.key}\`
* Default: ${unindent(this.defaultDescription)}
* Type: ${unindent(this.typeDescription)}
${deprecated}
${description}
+${exclusive}
${noEnvExport}`)
}
}
diff --git a/deps/npm/lib/utils/config/definitions.js b/deps/npm/lib/utils/config/definitions.js
index 78c341eabeffa7..f86c3ddff81b5e 100644
--- a/deps/npm/lib/utils/config/definitions.js
+++ b/deps/npm/lib/utils/config/definitions.js
@@ -331,6 +331,7 @@ define('cache', {
flatten (key, obj, flatOptions) {
flatOptions.cache = join(obj.cache, '_cacache')
flatOptions.npxCache = join(obj.cache, '_npx')
+ flatOptions.tufCache = join(obj.cache, '_tuf')
},
})
@@ -1495,8 +1496,6 @@ define('package-lock', {
If set to false, then ignore \`package-lock.json\` files when installing.
This will also prevent _writing_ \`package-lock.json\` if \`save\` is
true.
-
- This configuration does not affect \`npm ci\`.
`,
flatten: (key, obj, flatOptions) => {
flatten(key, obj, flatOptions)
@@ -1547,6 +1546,16 @@ define('parseable', {
flatten,
})
+define('prefer-dedupe', {
+ default: false,
+ type: Boolean,
+ description: `
+ Prefer to deduplicate packages if possible, rather than
+ choosing a newer version of a dependency.
+ `,
+ flatten,
+})
+
define('prefer-offline', {
default: false,
type: Boolean,
@@ -1626,6 +1635,7 @@ define('progress', {
define('provenance', {
default: false,
type: Boolean,
+ exclusive: ['provenance-file'],
description: `
When publishing from a supported cloud CI/CD system, the package will be
publicly linked to where it was built and published from.
@@ -1633,6 +1643,17 @@ define('provenance', {
flatten,
})
+define('provenance-file', {
+ default: null,
+ type: path,
+ hint: '',
+ exclusive: ['provenance'],
+ description: `
+ When publishing, the provenance bundle at the given path will be used.
+ `,
+ flatten,
+})
+
define('proxy', {
default: null,
type: [null, false, url], // allow proxy to be disabled explicitly
diff --git a/deps/npm/lib/utils/did-you-mean.js b/deps/npm/lib/utils/did-you-mean.js
index 10b33d5f83a080..ff3c812b46c3c7 100644
--- a/deps/npm/lib/utils/did-you-mean.js
+++ b/deps/npm/lib/utils/did-you-mean.js
@@ -1,19 +1,19 @@
+const Npm = require('../npm')
const { distance } = require('fastest-levenshtein')
-const readJson = require('read-package-json-fast')
+const pkgJson = require('@npmcli/package-json')
const { commands } = require('./cmd-list.js')
-const didYouMean = async (npm, path, scmd) => {
- // const cmd = await npm.cmd(str)
+const didYouMean = async (path, scmd) => {
const close = commands.filter(cmd => distance(scmd, cmd) < scmd.length * 0.4 && scmd !== cmd)
let best = []
for (const str of close) {
- const cmd = await npm.cmd(str)
+ const cmd = Npm.cmd(str)
best.push(` npm ${str} # ${cmd.description}`)
}
// We would already be suggesting this in `npm x` so omit them here
const runScripts = ['stop', 'start', 'test', 'restart']
try {
- const { bin, scripts } = await readJson(`${path}/package.json`)
+ const { content: { scripts, bin } } = await pkgJson.normalize(path)
best = best.concat(
Object.keys(scripts || {})
.filter(cmd => distance(scmd, cmd) < scmd.length * 0.4 && !runScripts.includes(cmd))
diff --git a/deps/npm/lib/utils/explain-eresolve.js b/deps/npm/lib/utils/explain-eresolve.js
index ba46f3480adb36..f3c6ae23a479db 100644
--- a/deps/npm/lib/utils/explain-eresolve.js
+++ b/deps/npm/lib/utils/explain-eresolve.js
@@ -44,7 +44,7 @@ const explain = (expl, chalk, depth) => {
}
// generate a full verbose report and tell the user how to fix it
-const report = (expl, chalk, noColor) => {
+const report = (expl, chalk, noColorChalk) => {
const flags = [
expl.strictPeerDeps ? '--no-strict-peer-deps' : '',
'--force',
@@ -61,7 +61,7 @@ to accept an incorrect (and potentially broken) dependency resolution.`
return {
explanation: `${explain(expl, chalk, 4)}\n\n${fix}`,
- file: `# npm resolution error report\n\n${explain(expl, noColor, Infinity)}\n\n${fix}`,
+ file: `# npm resolution error report\n\n${explain(expl, noColorChalk, Infinity)}\n\n${fix}`,
}
}
diff --git a/deps/npm/lib/utils/format-search-stream.js b/deps/npm/lib/utils/format-search-stream.js
index 21929cce7d056b..762dea90859d19 100644
--- a/deps/npm/lib/utils/format-search-stream.js
+++ b/deps/npm/lib/utils/format-search-stream.js
@@ -1,5 +1,6 @@
const { Minipass } = require('minipass')
const columnify = require('columnify')
+const ansiTrim = require('../utils/ansi-trim.js')
// This module consumes package data in the following format:
//
@@ -141,13 +142,13 @@ function highlightSearchTerms (str, terms) {
function normalizePackage (data, opts) {
return {
- name: data.name,
- description: data.description,
- author: data.maintainers.map((m) => `=${m.username}`).join(' '),
+ name: ansiTrim(data.name),
+ description: ansiTrim(data.description),
+ author: data.maintainers.map((m) => `=${ansiTrim(m.username)}`).join(' '),
keywords: Array.isArray(data.keywords)
- ? data.keywords.join(' ')
+ ? data.keywords.map(ansiTrim).join(' ')
: typeof data.keywords === 'string'
- ? data.keywords.replace(/[,\s]+/, ' ')
+ ? ansiTrim(data.keywords.replace(/[,\s]+/, ' '))
: '',
version: data.version,
date: (data.date &&
diff --git a/deps/npm/lib/utils/npm-usage.js b/deps/npm/lib/utils/npm-usage.js
index b04ad33f9dd79f..1bd790ca601bcd 100644
--- a/deps/npm/lib/utils/npm-usage.js
+++ b/deps/npm/lib/utils/npm-usage.js
@@ -8,9 +8,9 @@ const INDENT = 4
const indent = (repeat = INDENT) => ' '.repeat(repeat)
const indentNewline = (repeat) => `\n${indent(repeat)}`
-module.exports = async (npm) => {
+module.exports = (npm) => {
const browser = npm.config.get('viewer') === 'browser' ? ' (in a browser)' : ''
- const allCommands = npm.config.get('long') ? await cmdUsages(npm) : cmdNames()
+ const allCommands = npm.config.get('long') ? cmdUsages(npm.constructor) : cmdNames()
return `npm
@@ -57,13 +57,12 @@ const cmdNames = () => {
return indentNewline() + out.join(indentNewline()).slice(2)
}
-const cmdUsages = async (npm) => {
+const cmdUsages = (Npm) => {
// return a string of :
let maxLen = 0
const set = []
for (const c of commands) {
- const { usage } = await npm.cmd(c)
- set.push([c, usage.split('\n')])
+ set.push([c, Npm.cmd(c).describeUsage.split('\n')])
maxLen = Math.max(maxLen, c.length)
}
diff --git a/deps/npm/lib/utils/reify-output.js b/deps/npm/lib/utils/reify-output.js
index 5ac7fa4b01896b..22036dc8110cfc 100644
--- a/deps/npm/lib/utils/reify-output.js
+++ b/deps/npm/lib/utils/reify-output.js
@@ -116,6 +116,7 @@ const getAuditReport = (npm, report) => {
reporter,
...npm.flatOptions,
auditLevel,
+ chalk: npm.chalk,
})
if (npm.command === 'audit') {
process.exitCode = process.exitCode || res.exitCode
diff --git a/deps/npm/lib/workspaces/get-workspaces.js b/deps/npm/lib/workspaces/get-workspaces.js
index 7065533596000e..59efb0e9f01bef 100644
--- a/deps/npm/lib/workspaces/get-workspaces.js
+++ b/deps/npm/lib/workspaces/get-workspaces.js
@@ -1,7 +1,7 @@
const { resolve, relative } = require('path')
const mapWorkspaces = require('@npmcli/map-workspaces')
const { minimatch } = require('minimatch')
-const rpj = require('read-package-json-fast')
+const pkgJson = require('@npmcli/package-json')
// minimatch wants forward slashes only for glob patterns
const globify = pattern => pattern.split('\\').join('/')
@@ -9,8 +9,8 @@ const globify = pattern => pattern.split('\\').join('/')
// Returns an Map of paths to workspaces indexed by workspace name
// { foo => '/path/to/foo' }
const getWorkspaces = async (filters, { path, includeWorkspaceRoot, relativeFrom }) => {
- // TODO we need a better error to be bubbled up here if this rpj call fails
- const pkg = await rpj(resolve(path, 'package.json'))
+ // TODO we need a better error to be bubbled up here if this call fails
+ const { content: pkg } = await pkgJson.normalize(path)
const workspaces = await mapWorkspaces({ cwd: path, pkg })
let res = new Map()
if (includeWorkspaceRoot) {
diff --git a/deps/npm/man/man1/npm-access.1 b/deps/npm/man/man1/npm-access.1
index 8b8d996a119970..96534a0850595a 100644
--- a/deps/npm/man/man1/npm-access.1
+++ b/deps/npm/man/man1/npm-access.1
@@ -1,4 +1,4 @@
-.TH "NPM-ACCESS" "1" "May 2023" "" ""
+.TH "NPM-ACCESS" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-access\fR - Set access level on published packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-adduser.1 b/deps/npm/man/man1/npm-adduser.1
index 4653d85c7f8f8e..3a018dbdb9bc15 100644
--- a/deps/npm/man/man1/npm-adduser.1
+++ b/deps/npm/man/man1/npm-adduser.1
@@ -1,4 +1,4 @@
-.TH "NPM-ADDUSER" "1" "May 2023" "" ""
+.TH "NPM-ADDUSER" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-adduser\fR - Add a registry user account
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-audit.1 b/deps/npm/man/man1/npm-audit.1
index 270c517ef17fc5..da9a483715f697 100644
--- a/deps/npm/man/man1/npm-audit.1
+++ b/deps/npm/man/man1/npm-audit.1
@@ -1,4 +1,4 @@
-.TH "NPM-AUDIT" "1" "May 2023" "" ""
+.TH "NPM-AUDIT" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-audit\fR - Run a security audit
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-bugs.1 b/deps/npm/man/man1/npm-bugs.1
index efd8922d66c65a..a5ddd4e79cf712 100644
--- a/deps/npm/man/man1/npm-bugs.1
+++ b/deps/npm/man/man1/npm-bugs.1
@@ -1,4 +1,4 @@
-.TH "NPM-BUGS" "1" "May 2023" "" ""
+.TH "NPM-BUGS" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-bugs\fR - Report bugs for a package in a web browser
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-cache.1 b/deps/npm/man/man1/npm-cache.1
index 2757922d47b37f..5669388f55387d 100644
--- a/deps/npm/man/man1/npm-cache.1
+++ b/deps/npm/man/man1/npm-cache.1
@@ -1,4 +1,4 @@
-.TH "NPM-CACHE" "1" "May 2023" "" ""
+.TH "NPM-CACHE" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-cache\fR - Manipulates packages cache
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-ci.1 b/deps/npm/man/man1/npm-ci.1
index 4728ef4cb9bf44..05e435ec7b9fcf 100644
--- a/deps/npm/man/man1/npm-ci.1
+++ b/deps/npm/man/man1/npm-ci.1
@@ -1,4 +1,4 @@
-.TH "NPM-CI" "1" "May 2023" "" ""
+.TH "NPM-CI" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-ci\fR - Clean install a project
.SS "Synopsis"
@@ -130,18 +130,6 @@ If set to \fBtrue\fR, and \fB--legacy-peer-deps\fR is not set, then \fIany\fR co
By default, conflicting \fBpeerDependencies\fR deep in the dependency graph will be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's \fBpeerDependencies\fR object.
.P
When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure.
-.SS "\fBpackage-lock\fR"
-.RS 0
-.IP \(bu 4
-Default: true
-.IP \(bu 4
-Type: Boolean
-.RE 0
-
-.P
-If set to false, then ignore \fBpackage-lock.json\fR files when installing. This will also prevent \fIwriting\fR \fBpackage-lock.json\fR if \fBsave\fR is true.
-.P
-This configuration does not affect \fBnpm ci\fR.
.SS "\fBforeground-scripts\fR"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man1/npm-completion.1 b/deps/npm/man/man1/npm-completion.1
index 18a006978e39b0..d72df722ccd343 100644
--- a/deps/npm/man/man1/npm-completion.1
+++ b/deps/npm/man/man1/npm-completion.1
@@ -1,4 +1,4 @@
-.TH "NPM-COMPLETION" "1" "May 2023" "" ""
+.TH "NPM-COMPLETION" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-completion\fR - Tab Completion for npm
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-config.1 b/deps/npm/man/man1/npm-config.1
index a860ea4a9533fc..4b19fa0e473575 100644
--- a/deps/npm/man/man1/npm-config.1
+++ b/deps/npm/man/man1/npm-config.1
@@ -1,4 +1,4 @@
-.TH "NPM-CONFIG" "1" "May 2023" "" ""
+.TH "NPM-CONFIG" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-config\fR - Manage the npm configuration files
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-dedupe.1 b/deps/npm/man/man1/npm-dedupe.1
index 8cbe55011c070e..56e57db5eb54d0 100644
--- a/deps/npm/man/man1/npm-dedupe.1
+++ b/deps/npm/man/man1/npm-dedupe.1
@@ -1,4 +1,4 @@
-.TH "NPM-DEDUPE" "1" "May 2023" "" ""
+.TH "NPM-DEDUPE" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-dedupe\fR - Reduce duplication in the package tree
.SS "Synopsis"
@@ -123,8 +123,6 @@ Type: Boolean
.P
If set to false, then ignore \fBpackage-lock.json\fR files when installing. This will also prevent \fIwriting\fR \fBpackage-lock.json\fR if \fBsave\fR is true.
-.P
-This configuration does not affect \fBnpm ci\fR.
.SS "\fBomit\fR"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man1/npm-deprecate.1 b/deps/npm/man/man1/npm-deprecate.1
index f4b4087a428007..1458a880fca9d2 100644
--- a/deps/npm/man/man1/npm-deprecate.1
+++ b/deps/npm/man/man1/npm-deprecate.1
@@ -1,4 +1,4 @@
-.TH "NPM-DEPRECATE" "1" "May 2023" "" ""
+.TH "NPM-DEPRECATE" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-deprecate\fR - Deprecate a version of a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-diff.1 b/deps/npm/man/man1/npm-diff.1
index 906e1776f5f028..44df189c3910b9 100644
--- a/deps/npm/man/man1/npm-diff.1
+++ b/deps/npm/man/man1/npm-diff.1
@@ -1,4 +1,4 @@
-.TH "NPM-DIFF" "1" "May 2023" "" ""
+.TH "NPM-DIFF" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-diff\fR - The registry diff command
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-dist-tag.1 b/deps/npm/man/man1/npm-dist-tag.1
index 50dc8e3908041e..11e501009fb13b 100644
--- a/deps/npm/man/man1/npm-dist-tag.1
+++ b/deps/npm/man/man1/npm-dist-tag.1
@@ -1,4 +1,4 @@
-.TH "NPM-DIST-TAG" "1" "May 2023" "" ""
+.TH "NPM-DIST-TAG" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-dist-tag\fR - Modify package distribution tags
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-docs.1 b/deps/npm/man/man1/npm-docs.1
index be212b1f014cae..dd701e97099786 100644
--- a/deps/npm/man/man1/npm-docs.1
+++ b/deps/npm/man/man1/npm-docs.1
@@ -1,4 +1,4 @@
-.TH "NPM-DOCS" "1" "May 2023" "" ""
+.TH "NPM-DOCS" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-docs\fR - Open documentation for a package in a web browser
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-doctor.1 b/deps/npm/man/man1/npm-doctor.1
index 1592de0c32203b..b1d36f31034abb 100644
--- a/deps/npm/man/man1/npm-doctor.1
+++ b/deps/npm/man/man1/npm-doctor.1
@@ -1,4 +1,4 @@
-.TH "NPM-DOCTOR" "1" "May 2023" "" ""
+.TH "NPM-DOCTOR" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-doctor\fR - Check your npm environment
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-edit.1 b/deps/npm/man/man1/npm-edit.1
index 1c428d2c2a9f89..140946f3a1d30c 100644
--- a/deps/npm/man/man1/npm-edit.1
+++ b/deps/npm/man/man1/npm-edit.1
@@ -1,4 +1,4 @@
-.TH "NPM-EDIT" "1" "May 2023" "" ""
+.TH "NPM-EDIT" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-edit\fR - Edit an installed package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-exec.1 b/deps/npm/man/man1/npm-exec.1
index d1260e21844075..d6efae12633f32 100644
--- a/deps/npm/man/man1/npm-exec.1
+++ b/deps/npm/man/man1/npm-exec.1
@@ -1,4 +1,4 @@
-.TH "NPM-EXEC" "1" "May 2023" "" ""
+.TH "NPM-EXEC" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-exec\fR - Run a command from a local or remote npm package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-explain.1 b/deps/npm/man/man1/npm-explain.1
index d206176d92ba6a..1a32c14c1d3d71 100644
--- a/deps/npm/man/man1/npm-explain.1
+++ b/deps/npm/man/man1/npm-explain.1
@@ -1,4 +1,4 @@
-.TH "NPM-EXPLAIN" "1" "May 2023" "" ""
+.TH "NPM-EXPLAIN" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-explain\fR - Explain installed packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-explore.1 b/deps/npm/man/man1/npm-explore.1
index 39fe060038a849..2e9549c753aa92 100644
--- a/deps/npm/man/man1/npm-explore.1
+++ b/deps/npm/man/man1/npm-explore.1
@@ -1,4 +1,4 @@
-.TH "NPM-EXPLORE" "1" "May 2023" "" ""
+.TH "NPM-EXPLORE" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-explore\fR - Browse an installed package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-find-dupes.1 b/deps/npm/man/man1/npm-find-dupes.1
index efb00a7eba7b88..13d9d3adebaafa 100644
--- a/deps/npm/man/man1/npm-find-dupes.1
+++ b/deps/npm/man/man1/npm-find-dupes.1
@@ -1,4 +1,4 @@
-.TH "NPM-FIND-DUPES" "1" "May 2023" "" ""
+.TH "NPM-FIND-DUPES" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-find-dupes\fR - Find duplication in the package tree
.SS "Synopsis"
@@ -70,8 +70,6 @@ Type: Boolean
.P
If set to false, then ignore \fBpackage-lock.json\fR files when installing. This will also prevent \fIwriting\fR \fBpackage-lock.json\fR if \fBsave\fR is true.
-.P
-This configuration does not affect \fBnpm ci\fR.
.SS "\fBomit\fR"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man1/npm-fund.1 b/deps/npm/man/man1/npm-fund.1
index fe3adb8a7bca8f..074cd075823dc8 100644
--- a/deps/npm/man/man1/npm-fund.1
+++ b/deps/npm/man/man1/npm-fund.1
@@ -1,4 +1,4 @@
-.TH "NPM-FUND" "1" "May 2023" "" ""
+.TH "NPM-FUND" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-fund\fR - Retrieve funding information
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-help-search.1 b/deps/npm/man/man1/npm-help-search.1
index e46f4b3d2f89c5..1de2840e884e26 100644
--- a/deps/npm/man/man1/npm-help-search.1
+++ b/deps/npm/man/man1/npm-help-search.1
@@ -1,4 +1,4 @@
-.TH "NPM-HELP-SEARCH" "1" "May 2023" "" ""
+.TH "NPM-HELP-SEARCH" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-help-search\fR - Search npm help documentation
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-help.1 b/deps/npm/man/man1/npm-help.1
index b95a78b18bf959..340db5486a6451 100644
--- a/deps/npm/man/man1/npm-help.1
+++ b/deps/npm/man/man1/npm-help.1
@@ -1,4 +1,4 @@
-.TH "NPM-HELP" "1" "May 2023" "" ""
+.TH "NPM-HELP" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-help\fR - Get help on npm
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-hook.1 b/deps/npm/man/man1/npm-hook.1
index 99cc0585ffda58..4dfab8c702c82e 100644
--- a/deps/npm/man/man1/npm-hook.1
+++ b/deps/npm/man/man1/npm-hook.1
@@ -1,4 +1,4 @@
-.TH "NPM-HOOK" "1" "May 2023" "" ""
+.TH "NPM-HOOK" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-hook\fR - Manage registry hooks
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-init.1 b/deps/npm/man/man1/npm-init.1
index ec4f8c5c1e35fa..d74c57e3266c81 100644
--- a/deps/npm/man/man1/npm-init.1
+++ b/deps/npm/man/man1/npm-init.1
@@ -1,4 +1,4 @@
-.TH "NPM-INIT" "1" "May 2023" "" ""
+.TH "NPM-INIT" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-init\fR - Create a package.json file
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-install-ci-test.1 b/deps/npm/man/man1/npm-install-ci-test.1
index 96b310cdf98766..0aac5f1f874954 100644
--- a/deps/npm/man/man1/npm-install-ci-test.1
+++ b/deps/npm/man/man1/npm-install-ci-test.1
@@ -1,4 +1,4 @@
-.TH "NPM-INSTALL-CI-TEST" "1" "May 2023" "" ""
+.TH "NPM-INSTALL-CI-TEST" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-install-ci-test\fR - Install a project with a clean slate and run tests
.SS "Synopsis"
@@ -78,18 +78,6 @@ If set to \fBtrue\fR, and \fB--legacy-peer-deps\fR is not set, then \fIany\fR co
By default, conflicting \fBpeerDependencies\fR deep in the dependency graph will be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's \fBpeerDependencies\fR object.
.P
When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure.
-.SS "\fBpackage-lock\fR"
-.RS 0
-.IP \(bu 4
-Default: true
-.IP \(bu 4
-Type: Boolean
-.RE 0
-
-.P
-If set to false, then ignore \fBpackage-lock.json\fR files when installing. This will also prevent \fIwriting\fR \fBpackage-lock.json\fR if \fBsave\fR is true.
-.P
-This configuration does not affect \fBnpm ci\fR.
.SS "\fBforeground-scripts\fR"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man1/npm-install-test.1 b/deps/npm/man/man1/npm-install-test.1
index a403b55b15ba10..b66013372f7be7 100644
--- a/deps/npm/man/man1/npm-install-test.1
+++ b/deps/npm/man/man1/npm-install-test.1
@@ -1,4 +1,4 @@
-.TH "NPM-INSTALL-TEST" "1" "May 2023" "" ""
+.TH "NPM-INSTALL-TEST" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-install-test\fR - Install package(s) and run tests
.SS "Synopsis"
@@ -121,6 +121,16 @@ If set to \fBtrue\fR, and \fB--legacy-peer-deps\fR is not set, then \fIany\fR co
By default, conflicting \fBpeerDependencies\fR deep in the dependency graph will be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's \fBpeerDependencies\fR object.
.P
When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure.
+.SS "\fBprefer-dedupe\fR"
+.RS 0
+.IP \(bu 4
+Default: false
+.IP \(bu 4
+Type: Boolean
+.RE 0
+
+.P
+Prefer to deduplicate packages if possible, rather than choosing a newer version of a dependency.
.SS "\fBpackage-lock\fR"
.RS 0
.IP \(bu 4
@@ -131,8 +141,6 @@ Type: Boolean
.P
If set to false, then ignore \fBpackage-lock.json\fR files when installing. This will also prevent \fIwriting\fR \fBpackage-lock.json\fR if \fBsave\fR is true.
-.P
-This configuration does not affect \fBnpm ci\fR.
.SS "\fBforeground-scripts\fR"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1
index 8359b949f1644e..e8a019ad45e325 100644
--- a/deps/npm/man/man1/npm-install.1
+++ b/deps/npm/man/man1/npm-install.1
@@ -1,4 +1,4 @@
-.TH "NPM-INSTALL" "1" "May 2023" "" ""
+.TH "NPM-INSTALL" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-install\fR - Install a package
.SS "Synopsis"
@@ -483,6 +483,16 @@ If set to \fBtrue\fR, and \fB--legacy-peer-deps\fR is not set, then \fIany\fR co
By default, conflicting \fBpeerDependencies\fR deep in the dependency graph will be resolved using the nearest non-peer dependency specification, even if doing so will result in some packages receiving a peer dependency outside the range set in their package's \fBpeerDependencies\fR object.
.P
When such an override is performed, a warning is printed, explaining the conflict and the packages involved. If \fB--strict-peer-deps\fR is set, then this warning is treated as a failure.
+.SS "\fBprefer-dedupe\fR"
+.RS 0
+.IP \(bu 4
+Default: false
+.IP \(bu 4
+Type: Boolean
+.RE 0
+
+.P
+Prefer to deduplicate packages if possible, rather than choosing a newer version of a dependency.
.SS "\fBpackage-lock\fR"
.RS 0
.IP \(bu 4
@@ -493,8 +503,6 @@ Type: Boolean
.P
If set to false, then ignore \fBpackage-lock.json\fR files when installing. This will also prevent \fIwriting\fR \fBpackage-lock.json\fR if \fBsave\fR is true.
-.P
-This configuration does not affect \fBnpm ci\fR.
.SS "\fBforeground-scripts\fR"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man1/npm-link.1 b/deps/npm/man/man1/npm-link.1
index ebe0a7c70d7ae4..38df2396ca9b08 100644
--- a/deps/npm/man/man1/npm-link.1
+++ b/deps/npm/man/man1/npm-link.1
@@ -1,4 +1,4 @@
-.TH "NPM-LINK" "1" "May 2023" "" ""
+.TH "NPM-LINK" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-link\fR - Symlink a package folder
.SS "Synopsis"
@@ -182,8 +182,6 @@ Type: Boolean
.P
If set to false, then ignore \fBpackage-lock.json\fR files when installing. This will also prevent \fIwriting\fR \fBpackage-lock.json\fR if \fBsave\fR is true.
-.P
-This configuration does not affect \fBnpm ci\fR.
.SS "\fBomit\fR"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man1/npm-login.1 b/deps/npm/man/man1/npm-login.1
index 61b9c2ae235dd7..f593dbb817ef87 100644
--- a/deps/npm/man/man1/npm-login.1
+++ b/deps/npm/man/man1/npm-login.1
@@ -1,4 +1,4 @@
-.TH "NPM-LOGIN" "1" "May 2023" "" ""
+.TH "NPM-LOGIN" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-login\fR - Login to a registry user account
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-logout.1 b/deps/npm/man/man1/npm-logout.1
index 546a04044e4eeb..1f3fbb5f40b0f4 100644
--- a/deps/npm/man/man1/npm-logout.1
+++ b/deps/npm/man/man1/npm-logout.1
@@ -1,4 +1,4 @@
-.TH "NPM-LOGOUT" "1" "May 2023" "" ""
+.TH "NPM-LOGOUT" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-logout\fR - Log out of the registry
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1
index fe605158b7aaa9..4696fc20bf6d4a 100644
--- a/deps/npm/man/man1/npm-ls.1
+++ b/deps/npm/man/man1/npm-ls.1
@@ -1,4 +1,4 @@
-.TH "NPM-LS" "1" "May 2023" "" ""
+.TH "NPM-LS" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-ls\fR - List installed packages
.SS "Synopsis"
@@ -20,7 +20,7 @@ Positional arguments are \fBname@version-range\fR identifiers, which will limit
.P
.RS 2
.nf
-npm@9.6.7 /path/to/npm
+npm@9.7.1 /path/to/npm
└─┬ init-package-json@0.0.4
└── promzard@0.1.5
.fi
diff --git a/deps/npm/man/man1/npm-org.1 b/deps/npm/man/man1/npm-org.1
index 8a69570f2da319..0d1d4458e2f0c5 100644
--- a/deps/npm/man/man1/npm-org.1
+++ b/deps/npm/man/man1/npm-org.1
@@ -1,4 +1,4 @@
-.TH "NPM-ORG" "1" "May 2023" "" ""
+.TH "NPM-ORG" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-org\fR - Manage orgs
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-outdated.1 b/deps/npm/man/man1/npm-outdated.1
index 96527eec8bda05..b1665a772b0b94 100644
--- a/deps/npm/man/man1/npm-outdated.1
+++ b/deps/npm/man/man1/npm-outdated.1
@@ -1,4 +1,4 @@
-.TH "NPM-OUTDATED" "1" "May 2023" "" ""
+.TH "NPM-OUTDATED" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-outdated\fR - Check for outdated packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-owner.1 b/deps/npm/man/man1/npm-owner.1
index 68997c98c121b5..16a6febff6c7c6 100644
--- a/deps/npm/man/man1/npm-owner.1
+++ b/deps/npm/man/man1/npm-owner.1
@@ -1,4 +1,4 @@
-.TH "NPM-OWNER" "1" "May 2023" "" ""
+.TH "NPM-OWNER" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-owner\fR - Manage package owners
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-pack.1 b/deps/npm/man/man1/npm-pack.1
index 40e1ac2e382466..7803db5d7a8f8a 100644
--- a/deps/npm/man/man1/npm-pack.1
+++ b/deps/npm/man/man1/npm-pack.1
@@ -1,4 +1,4 @@
-.TH "NPM-PACK" "1" "May 2023" "" ""
+.TH "NPM-PACK" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-pack\fR - Create a tarball from a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-ping.1 b/deps/npm/man/man1/npm-ping.1
index db6f0a79d6bb92..568a752ae79da4 100644
--- a/deps/npm/man/man1/npm-ping.1
+++ b/deps/npm/man/man1/npm-ping.1
@@ -1,4 +1,4 @@
-.TH "NPM-PING" "1" "May 2023" "" ""
+.TH "NPM-PING" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-ping\fR - Ping npm registry
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-pkg.1 b/deps/npm/man/man1/npm-pkg.1
index 42e6834e7e55c2..ec10aec156d281 100644
--- a/deps/npm/man/man1/npm-pkg.1
+++ b/deps/npm/man/man1/npm-pkg.1
@@ -1,4 +1,4 @@
-.TH "NPM-PKG" "1" "May 2023" "" ""
+.TH "NPM-PKG" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-pkg\fR - Manages your package.json
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-prefix.1 b/deps/npm/man/man1/npm-prefix.1
index a1d4ee3dbc022b..9b24982794ac1b 100644
--- a/deps/npm/man/man1/npm-prefix.1
+++ b/deps/npm/man/man1/npm-prefix.1
@@ -1,4 +1,4 @@
-.TH "NPM-PREFIX" "1" "May 2023" "" ""
+.TH "NPM-PREFIX" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-prefix\fR - Display prefix
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-profile.1 b/deps/npm/man/man1/npm-profile.1
index b4f3595ff6832f..cc5daf398a546e 100644
--- a/deps/npm/man/man1/npm-profile.1
+++ b/deps/npm/man/man1/npm-profile.1
@@ -1,4 +1,4 @@
-.TH "NPM-PROFILE" "1" "May 2023" "" ""
+.TH "NPM-PROFILE" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-profile\fR - Change settings on your registry profile
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-prune.1 b/deps/npm/man/man1/npm-prune.1
index 98d181fe627348..d724cc5282c7c5 100644
--- a/deps/npm/man/man1/npm-prune.1
+++ b/deps/npm/man/man1/npm-prune.1
@@ -1,4 +1,4 @@
-.TH "NPM-PRUNE" "1" "May 2023" "" ""
+.TH "NPM-PRUNE" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-prune\fR - Remove extraneous packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-publish.1 b/deps/npm/man/man1/npm-publish.1
index 5f7ccce98d1180..bb315398794607 100644
--- a/deps/npm/man/man1/npm-publish.1
+++ b/deps/npm/man/man1/npm-publish.1
@@ -1,4 +1,4 @@
-.TH "NPM-PUBLISH" "1" "May 2023" "" ""
+.TH "NPM-PUBLISH" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-publish\fR - Publish a package
.SS "Synopsis"
@@ -182,6 +182,20 @@ Type: Boolean
.P
When publishing from a supported cloud CI/CD system, the package will be publicly linked to where it was built and published from.
+.P
+This config can not be used with: \fBprovenance-file\fR
+.SS "\fBprovenance-file\fR"
+.RS 0
+.IP \(bu 4
+Default: null
+.IP \(bu 4
+Type: Path
+.RE 0
+
+.P
+When publishing, the provenance bundle at the given path will be used.
+.P
+This config can not be used with: \fBprovenance\fR
.SS "See Also"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man1/npm-query.1 b/deps/npm/man/man1/npm-query.1
index 0cd80c479cfe2b..84dcbb9775770d 100644
--- a/deps/npm/man/man1/npm-query.1
+++ b/deps/npm/man/man1/npm-query.1
@@ -1,4 +1,4 @@
-.TH "NPM-QUERY" "1" "May 2023" "" ""
+.TH "NPM-QUERY" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-query\fR - Dependency selector query
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-rebuild.1 b/deps/npm/man/man1/npm-rebuild.1
index 5153e2297755d1..e0f8c8e084c297 100644
--- a/deps/npm/man/man1/npm-rebuild.1
+++ b/deps/npm/man/man1/npm-rebuild.1
@@ -1,4 +1,4 @@
-.TH "NPM-REBUILD" "1" "May 2023" "" ""
+.TH "NPM-REBUILD" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-rebuild\fR - Rebuild a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-repo.1 b/deps/npm/man/man1/npm-repo.1
index 960699c6192f36..91d81761c10586 100644
--- a/deps/npm/man/man1/npm-repo.1
+++ b/deps/npm/man/man1/npm-repo.1
@@ -1,4 +1,4 @@
-.TH "NPM-REPO" "1" "May 2023" "" ""
+.TH "NPM-REPO" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-repo\fR - Open package repository page in the browser
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-restart.1 b/deps/npm/man/man1/npm-restart.1
index 21ae38d01ccb97..0caa8f7eea4486 100644
--- a/deps/npm/man/man1/npm-restart.1
+++ b/deps/npm/man/man1/npm-restart.1
@@ -1,4 +1,4 @@
-.TH "NPM-RESTART" "1" "May 2023" "" ""
+.TH "NPM-RESTART" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-restart\fR - Restart a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-root.1 b/deps/npm/man/man1/npm-root.1
index 6a53d9e695a280..e640d17e025d2a 100644
--- a/deps/npm/man/man1/npm-root.1
+++ b/deps/npm/man/man1/npm-root.1
@@ -1,4 +1,4 @@
-.TH "NPM-ROOT" "1" "May 2023" "" ""
+.TH "NPM-ROOT" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-root\fR - Display npm root
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-run-script.1 b/deps/npm/man/man1/npm-run-script.1
index 0838720edb31d3..cdd5e4ad7ddb4b 100644
--- a/deps/npm/man/man1/npm-run-script.1
+++ b/deps/npm/man/man1/npm-run-script.1
@@ -1,4 +1,4 @@
-.TH "NPM-RUN-SCRIPT" "1" "May 2023" "" ""
+.TH "NPM-RUN-SCRIPT" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-run-script\fR - Run arbitrary package scripts
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-search.1 b/deps/npm/man/man1/npm-search.1
index 18024e5e9cddbc..f26ddca63b832d 100644
--- a/deps/npm/man/man1/npm-search.1
+++ b/deps/npm/man/man1/npm-search.1
@@ -1,4 +1,4 @@
-.TH "NPM-SEARCH" "1" "May 2023" "" ""
+.TH "NPM-SEARCH" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-search\fR - Search for packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-shrinkwrap.1 b/deps/npm/man/man1/npm-shrinkwrap.1
index 312c444488333d..0d641645964db9 100644
--- a/deps/npm/man/man1/npm-shrinkwrap.1
+++ b/deps/npm/man/man1/npm-shrinkwrap.1
@@ -1,4 +1,4 @@
-.TH "NPM-SHRINKWRAP" "1" "May 2023" "" ""
+.TH "NPM-SHRINKWRAP" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-shrinkwrap\fR - Lock down dependency versions for publication
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-star.1 b/deps/npm/man/man1/npm-star.1
index f293aa8a346b9b..2bbd7b3e2de063 100644
--- a/deps/npm/man/man1/npm-star.1
+++ b/deps/npm/man/man1/npm-star.1
@@ -1,4 +1,4 @@
-.TH "NPM-STAR" "1" "May 2023" "" ""
+.TH "NPM-STAR" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-star\fR - Mark your favorite packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-stars.1 b/deps/npm/man/man1/npm-stars.1
index c7812e1e925aee..3f8721a2fcf260 100644
--- a/deps/npm/man/man1/npm-stars.1
+++ b/deps/npm/man/man1/npm-stars.1
@@ -1,4 +1,4 @@
-.TH "NPM-STARS" "1" "May 2023" "" ""
+.TH "NPM-STARS" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-stars\fR - View packages marked as favorites
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-start.1 b/deps/npm/man/man1/npm-start.1
index 06dbfc0142751f..27c095652c3994 100644
--- a/deps/npm/man/man1/npm-start.1
+++ b/deps/npm/man/man1/npm-start.1
@@ -1,4 +1,4 @@
-.TH "NPM-START" "1" "May 2023" "" ""
+.TH "NPM-START" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-start\fR - Start a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-stop.1 b/deps/npm/man/man1/npm-stop.1
index cdc7d807cf6508..14742a96d8a10a 100644
--- a/deps/npm/man/man1/npm-stop.1
+++ b/deps/npm/man/man1/npm-stop.1
@@ -1,4 +1,4 @@
-.TH "NPM-STOP" "1" "May 2023" "" ""
+.TH "NPM-STOP" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-stop\fR - Stop a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-team.1 b/deps/npm/man/man1/npm-team.1
index 84b82d9a2cb92f..abed5a5d48d008 100644
--- a/deps/npm/man/man1/npm-team.1
+++ b/deps/npm/man/man1/npm-team.1
@@ -1,4 +1,4 @@
-.TH "NPM-TEAM" "1" "May 2023" "" ""
+.TH "NPM-TEAM" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-team\fR - Manage organization teams and team memberships
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-test.1 b/deps/npm/man/man1/npm-test.1
index 9832f0b5f01c08..7f4945ba882db3 100644
--- a/deps/npm/man/man1/npm-test.1
+++ b/deps/npm/man/man1/npm-test.1
@@ -1,4 +1,4 @@
-.TH "NPM-TEST" "1" "May 2023" "" ""
+.TH "NPM-TEST" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-test\fR - Test a package
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-token.1 b/deps/npm/man/man1/npm-token.1
index d0593123ac8437..a9aeb2f6c24f53 100644
--- a/deps/npm/man/man1/npm-token.1
+++ b/deps/npm/man/man1/npm-token.1
@@ -1,4 +1,4 @@
-.TH "NPM-TOKEN" "1" "May 2023" "" ""
+.TH "NPM-TOKEN" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-token\fR - Manage your authentication tokens
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1
index 7e0237c768a800..b35a578da663db 100644
--- a/deps/npm/man/man1/npm-uninstall.1
+++ b/deps/npm/man/man1/npm-uninstall.1
@@ -1,4 +1,4 @@
-.TH "NPM-UNINSTALL" "1" "May 2023" "" ""
+.TH "NPM-UNINSTALL" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-uninstall\fR - Remove a package
.SS "Synopsis"
@@ -57,6 +57,25 @@ Save installed packages to a \fBpackage.json\fR file as dependencies.
When used with the \fBnpm rm\fR command, removes the dependency from \fBpackage.json\fR.
.P
Will also prevent writing to \fBpackage-lock.json\fR if set to \fBfalse\fR.
+.SS "\fBglobal\fR"
+.RS 0
+.IP \(bu 4
+Default: false
+.IP \(bu 4
+Type: Boolean
+.RE 0
+
+.P
+Operates in "global" mode, so that packages are installed into the \fBprefix\fR folder instead of the current working directory. See npm help folders for more on the differences in behavior.
+.RS 0
+.IP \(bu 4
+packages are installed into the \fB{prefix}/lib/node_modules\fR folder, instead of the current working directory.
+.IP \(bu 4
+bin files are linked to \fB{prefix}/bin\fR
+.IP \(bu 4
+man pages are linked to \fB{prefix}/share/man\fR
+.RE 0
+
.SS "\fBworkspace\fR"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man1/npm-unpublish.1 b/deps/npm/man/man1/npm-unpublish.1
index 21ac3b07226c3a..151d24baa7c8c7 100644
--- a/deps/npm/man/man1/npm-unpublish.1
+++ b/deps/npm/man/man1/npm-unpublish.1
@@ -1,4 +1,4 @@
-.TH "NPM-UNPUBLISH" "1" "May 2023" "" ""
+.TH "NPM-UNPUBLISH" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-unpublish\fR - Remove a package from the registry
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-unstar.1 b/deps/npm/man/man1/npm-unstar.1
index 70242ed19632c3..802ccf68678ed6 100644
--- a/deps/npm/man/man1/npm-unstar.1
+++ b/deps/npm/man/man1/npm-unstar.1
@@ -1,4 +1,4 @@
-.TH "NPM-UNSTAR" "1" "May 2023" "" ""
+.TH "NPM-UNSTAR" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-unstar\fR - Remove an item from your favorite packages
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-update.1 b/deps/npm/man/man1/npm-update.1
index 82e3ff9b193d1d..787e2ee663875b 100644
--- a/deps/npm/man/man1/npm-update.1
+++ b/deps/npm/man/man1/npm-update.1
@@ -1,4 +1,4 @@
-.TH "NPM-UPDATE" "1" "May 2023" "" ""
+.TH "NPM-UPDATE" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-update\fR - Update packages
.SS "Synopsis"
@@ -239,8 +239,6 @@ Type: Boolean
.P
If set to false, then ignore \fBpackage-lock.json\fR files when installing. This will also prevent \fIwriting\fR \fBpackage-lock.json\fR if \fBsave\fR is true.
-.P
-This configuration does not affect \fBnpm ci\fR.
.SS "\fBforeground-scripts\fR"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man1/npm-version.1 b/deps/npm/man/man1/npm-version.1
index 620d6d56fe08fb..3a88228dc2f261 100644
--- a/deps/npm/man/man1/npm-version.1
+++ b/deps/npm/man/man1/npm-version.1
@@ -1,4 +1,4 @@
-.TH "NPM-VERSION" "1" "May 2023" "" ""
+.TH "NPM-VERSION" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-version\fR - Bump a package version
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-view.1 b/deps/npm/man/man1/npm-view.1
index 5733d82cfcfd6d..50b8dcb5e4cfbd 100644
--- a/deps/npm/man/man1/npm-view.1
+++ b/deps/npm/man/man1/npm-view.1
@@ -1,4 +1,4 @@
-.TH "NPM-VIEW" "1" "May 2023" "" ""
+.TH "NPM-VIEW" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-view\fR - View registry info
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm-whoami.1 b/deps/npm/man/man1/npm-whoami.1
index add5aac5a2689a..7bc53ca0d7796b 100644
--- a/deps/npm/man/man1/npm-whoami.1
+++ b/deps/npm/man/man1/npm-whoami.1
@@ -1,4 +1,4 @@
-.TH "NPM-WHOAMI" "1" "May 2023" "" ""
+.TH "NPM-WHOAMI" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm-whoami\fR - Display npm username
.SS "Synopsis"
diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1
index 1fcb52d7b3952b..7e6e660595d87b 100644
--- a/deps/npm/man/man1/npm.1
+++ b/deps/npm/man/man1/npm.1
@@ -1,4 +1,4 @@
-.TH "NPM" "1" "May 2023" "" ""
+.TH "NPM" "1" "June 2023" "" ""
.SH "NAME"
\fBnpm\fR - javascript package manager
.SS "Synopsis"
@@ -12,7 +12,7 @@ npm
Note: This command is unaware of workspaces.
.SS "Version"
.P
-9.6.7
+9.7.1
.SS "Description"
.P
npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency conflicts intelligently.
diff --git a/deps/npm/man/man1/npx.1 b/deps/npm/man/man1/npx.1
index c392079457ac1d..62545305dc679e 100644
--- a/deps/npm/man/man1/npx.1
+++ b/deps/npm/man/man1/npx.1
@@ -1,4 +1,4 @@
-.TH "NPX" "1" "May 2023" "" ""
+.TH "NPX" "1" "June 2023" "" ""
.SH "NAME"
\fBnpx\fR - Run a command from a local or remote npm package
.SS "Synopsis"
diff --git a/deps/npm/man/man5/folders.5 b/deps/npm/man/man5/folders.5
index 934e3af20f757e..c4ddf51f87cade 100644
--- a/deps/npm/man/man5/folders.5
+++ b/deps/npm/man/man5/folders.5
@@ -1,4 +1,4 @@
-.TH "FOLDERS" "5" "May 2023" "" ""
+.TH "FOLDERS" "5" "June 2023" "" ""
.SH "NAME"
\fBfolders\fR - Folder Structures Used by npm
.SS "Description"
diff --git a/deps/npm/man/man5/install.5 b/deps/npm/man/man5/install.5
index de38edb1b1b4a5..096c705950c113 100644
--- a/deps/npm/man/man5/install.5
+++ b/deps/npm/man/man5/install.5
@@ -1,4 +1,4 @@
-.TH "INSTALL" "5" "May 2023" "" ""
+.TH "INSTALL" "5" "June 2023" "" ""
.SH "NAME"
\fBinstall\fR - Download and install node and npm
.SS "Description"
diff --git a/deps/npm/man/man5/npm-global.5 b/deps/npm/man/man5/npm-global.5
index 934e3af20f757e..c4ddf51f87cade 100644
--- a/deps/npm/man/man5/npm-global.5
+++ b/deps/npm/man/man5/npm-global.5
@@ -1,4 +1,4 @@
-.TH "FOLDERS" "5" "May 2023" "" ""
+.TH "FOLDERS" "5" "June 2023" "" ""
.SH "NAME"
\fBfolders\fR - Folder Structures Used by npm
.SS "Description"
diff --git a/deps/npm/man/man5/npm-json.5 b/deps/npm/man/man5/npm-json.5
index c646afa88dfedc..36e22713c4ce82 100644
--- a/deps/npm/man/man5/npm-json.5
+++ b/deps/npm/man/man5/npm-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE.JSON" "5" "May 2023" "" ""
+.TH "PACKAGE.JSON" "5" "June 2023" "" ""
.SH "NAME"
\fBpackage.json\fR - Specifics of npm's package.json handling
.SS "Description"
diff --git a/deps/npm/man/man5/npm-shrinkwrap-json.5 b/deps/npm/man/man5/npm-shrinkwrap-json.5
index f3ab3eda6aa409..3ac345a3452cc2 100644
--- a/deps/npm/man/man5/npm-shrinkwrap-json.5
+++ b/deps/npm/man/man5/npm-shrinkwrap-json.5
@@ -1,4 +1,4 @@
-.TH "NPM-SHRINKWRAP.JSON" "5" "May 2023" "" ""
+.TH "NPM-SHRINKWRAP.JSON" "5" "June 2023" "" ""
.SH "NAME"
\fBnpm-shrinkwrap.json\fR - A publishable lockfile
.SS "Description"
diff --git a/deps/npm/man/man5/npmrc.5 b/deps/npm/man/man5/npmrc.5
index c04212f9e21d96..8aaab33cd74448 100644
--- a/deps/npm/man/man5/npmrc.5
+++ b/deps/npm/man/man5/npmrc.5
@@ -1,4 +1,4 @@
-.TH "NPMRC" "5" "May 2023" "" ""
+.TH "NPMRC" "5" "June 2023" "" ""
.SH "NAME"
\fBnpmrc\fR - The npm config files
.SS "Description"
diff --git a/deps/npm/man/man5/package-json.5 b/deps/npm/man/man5/package-json.5
index c646afa88dfedc..36e22713c4ce82 100644
--- a/deps/npm/man/man5/package-json.5
+++ b/deps/npm/man/man5/package-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE.JSON" "5" "May 2023" "" ""
+.TH "PACKAGE.JSON" "5" "June 2023" "" ""
.SH "NAME"
\fBpackage.json\fR - Specifics of npm's package.json handling
.SS "Description"
diff --git a/deps/npm/man/man5/package-lock-json.5 b/deps/npm/man/man5/package-lock-json.5
index 0f529dc9bbc4ae..ceac876f546a5b 100644
--- a/deps/npm/man/man5/package-lock-json.5
+++ b/deps/npm/man/man5/package-lock-json.5
@@ -1,4 +1,4 @@
-.TH "PACKAGE-LOCK.JSON" "5" "May 2023" "" ""
+.TH "PACKAGE-LOCK.JSON" "5" "June 2023" "" ""
.SH "NAME"
\fBpackage-lock.json\fR - A manifestation of the manifest
.SS "Description"
diff --git a/deps/npm/man/man7/config.7 b/deps/npm/man/man7/config.7
index e20d300946a863..cec41d8d3f4b87 100644
--- a/deps/npm/man/man7/config.7
+++ b/deps/npm/man/man7/config.7
@@ -1,4 +1,4 @@
-.TH "CONFIG" "7" "May 2023" "" ""
+.TH "CONFIG" "7" "June 2023" "" ""
.SH "NAME"
\fBconfig\fR - More than you probably want to know about npm configuration
.SS "Description"
@@ -1080,8 +1080,6 @@ Type: Boolean
.P
If set to false, then ignore \fBpackage-lock.json\fR files when installing. This will also prevent \fIwriting\fR \fBpackage-lock.json\fR if \fBsave\fR is true.
-.P
-This configuration does not affect \fBnpm ci\fR.
.SS "\fBpackage-lock-only\fR"
.RS 0
.IP \(bu 4
@@ -1106,6 +1104,16 @@ Type: Boolean
.P
Output parseable results from commands that write to standard output. For \fBnpm search\fR, this will be tab-separated table format.
+.SS "\fBprefer-dedupe\fR"
+.RS 0
+.IP \(bu 4
+Default: false
+.IP \(bu 4
+Type: Boolean
+.RE 0
+
+.P
+Prefer to deduplicate packages if possible, rather than choosing a newer version of a dependency.
.SS "\fBprefer-offline\fR"
.RS 0
.IP \(bu 4
@@ -1168,6 +1176,20 @@ Type: Boolean
.P
When publishing from a supported cloud CI/CD system, the package will be publicly linked to where it was built and published from.
+.P
+This config can not be used with: \fBprovenance-file\fR
+.SS "\fBprovenance-file\fR"
+.RS 0
+.IP \(bu 4
+Default: null
+.IP \(bu 4
+Type: Path
+.RE 0
+
+.P
+When publishing, the provenance bundle at the given path will be used.
+.P
+This config can not be used with: \fBprovenance\fR
.SS "\fBproxy\fR"
.RS 0
.IP \(bu 4
diff --git a/deps/npm/man/man7/dependency-selectors.7 b/deps/npm/man/man7/dependency-selectors.7
index d84c647c93038f..74767ecff53496 100644
--- a/deps/npm/man/man7/dependency-selectors.7
+++ b/deps/npm/man/man7/dependency-selectors.7
@@ -1,4 +1,4 @@
-.TH "QUERYING" "7" "May 2023" "" ""
+.TH "QUERYING" "7" "June 2023" "" ""
.SH "NAME"
\fBQuerying\fR - Dependency Selector Syntax & Querying
.SS "Description"
diff --git a/deps/npm/man/man7/developers.7 b/deps/npm/man/man7/developers.7
index 9d1c568bd7c437..30c2c60da44c93 100644
--- a/deps/npm/man/man7/developers.7
+++ b/deps/npm/man/man7/developers.7
@@ -1,4 +1,4 @@
-.TH "DEVELOPERS" "7" "May 2023" "" ""
+.TH "DEVELOPERS" "7" "June 2023" "" ""
.SH "NAME"
\fBdevelopers\fR - Developer Guide
.SS "Description"
diff --git a/deps/npm/man/man7/logging.7 b/deps/npm/man/man7/logging.7
index f8f0e9a9e0d8ef..62ae6b1a5b1076 100644
--- a/deps/npm/man/man7/logging.7
+++ b/deps/npm/man/man7/logging.7
@@ -1,4 +1,4 @@
-.TH "LOGGING" "7" "May 2023" "" ""
+.TH "LOGGING" "7" "June 2023" "" ""
.SH "NAME"
\fBLogging\fR - Why, What & How We Log
.SS "Description"
diff --git a/deps/npm/man/man7/orgs.7 b/deps/npm/man/man7/orgs.7
index 7e67161fb6830d..6ebfc55f1526f0 100644
--- a/deps/npm/man/man7/orgs.7
+++ b/deps/npm/man/man7/orgs.7
@@ -1,4 +1,4 @@
-.TH "ORGS" "7" "May 2023" "" ""
+.TH "ORGS" "7" "June 2023" "" ""
.SH "NAME"
\fBorgs\fR - Working with Teams & Orgs
.SS "Description"
diff --git a/deps/npm/man/man7/package-spec.7 b/deps/npm/man/man7/package-spec.7
index 21342529c51c1a..0e3ff55c217d5f 100644
--- a/deps/npm/man/man7/package-spec.7
+++ b/deps/npm/man/man7/package-spec.7
@@ -1,4 +1,4 @@
-.TH "PACKAGE-SPEC" "7" "May 2023" "" ""
+.TH "PACKAGE-SPEC" "7" "June 2023" "" ""
.SH "NAME"
\fBpackage-spec\fR - Package name specifier
.SS "Description"
diff --git a/deps/npm/man/man7/registry.7 b/deps/npm/man/man7/registry.7
index b27bdd40295099..db66480d4465d8 100644
--- a/deps/npm/man/man7/registry.7
+++ b/deps/npm/man/man7/registry.7
@@ -1,4 +1,4 @@
-.TH "REGISTRY" "7" "May 2023" "" ""
+.TH "REGISTRY" "7" "June 2023" "" ""
.SH "NAME"
\fBregistry\fR - The JavaScript Package Registry
.SS "Description"
diff --git a/deps/npm/man/man7/removal.7 b/deps/npm/man/man7/removal.7
index 1f9dfcf016aea4..dfd9542fb278c8 100644
--- a/deps/npm/man/man7/removal.7
+++ b/deps/npm/man/man7/removal.7
@@ -1,4 +1,4 @@
-.TH "REMOVAL" "7" "May 2023" "" ""
+.TH "REMOVAL" "7" "June 2023" "" ""
.SH "NAME"
\fBremoval\fR - Cleaning the Slate
.SS "Synopsis"
diff --git a/deps/npm/man/man7/scope.7 b/deps/npm/man/man7/scope.7
index 00f2240aedd385..ac9cd5f29ee605 100644
--- a/deps/npm/man/man7/scope.7
+++ b/deps/npm/man/man7/scope.7
@@ -1,4 +1,4 @@
-.TH "SCOPE" "7" "May 2023" "" ""
+.TH "SCOPE" "7" "June 2023" "" ""
.SH "NAME"
\fBscope\fR - Scoped packages
.SS "Description"
diff --git a/deps/npm/man/man7/scripts.7 b/deps/npm/man/man7/scripts.7
index 00ae221f4b0901..72012d4dcd48d2 100644
--- a/deps/npm/man/man7/scripts.7
+++ b/deps/npm/man/man7/scripts.7
@@ -1,4 +1,4 @@
-.TH "SCRIPTS" "7" "May 2023" "" ""
+.TH "SCRIPTS" "7" "June 2023" "" ""
.SH "NAME"
\fBscripts\fR - How npm handles the "scripts" field
.SS "Description"
diff --git a/deps/npm/man/man7/workspaces.7 b/deps/npm/man/man7/workspaces.7
index 896e40c294c367..68dca1c75aa14b 100644
--- a/deps/npm/man/man7/workspaces.7
+++ b/deps/npm/man/man7/workspaces.7
@@ -1,4 +1,4 @@
-.TH "WORKSPACES" "7" "May 2023" "" ""
+.TH "WORKSPACES" "7" "June 2023" "" ""
.SH "NAME"
\fBworkspaces\fR - Working with workspaces
.SS "Description"
diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json
index bba6a2e9e5c85c..ec98a3c6a69394 100644
--- a/deps/npm/node_modules/@npmcli/arborist/package.json
+++ b/deps/npm/node_modules/@npmcli/arborist/package.json
@@ -41,7 +41,6 @@
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.14.1",
"benchmark": "^2.1.4",
- "chalk": "^4.1.0",
"minify-registry-metadata": "^3.0.0",
"nock": "^13.3.0",
"tap": "^16.3.4",
diff --git a/deps/npm/node_modules/@npmcli/config/lib/index.js b/deps/npm/node_modules/@npmcli/config/lib/index.js
index b7b848dea151c8..84a009851d8a2e 100644
--- a/deps/npm/node_modules/@npmcli/config/lib/index.js
+++ b/deps/npm/node_modules/@npmcli/config/lib/index.js
@@ -575,6 +575,13 @@ class Config {
const v = this.parseField(value, k)
if (where !== 'default') {
this.#checkDeprecated(k, where, obj, [key, value])
+ if (this.definitions[key]?.exclusive) {
+ for (const exclusive of this.definitions[key].exclusive) {
+ if (!this.isDefault(exclusive)) {
+ throw new TypeError(`--${key} can not be provided when using --${exclusive}`)
+ }
+ }
+ }
}
conf.data[k] = v
}
diff --git a/deps/npm/node_modules/@npmcli/config/package.json b/deps/npm/node_modules/@npmcli/config/package.json
index e68d5166901451..e0190fc3ac53f6 100644
--- a/deps/npm/node_modules/@npmcli/config/package.json
+++ b/deps/npm/node_modules/@npmcli/config/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/config",
- "version": "6.1.7",
+ "version": "6.2.0",
"files": [
"bin/",
"lib/"
diff --git a/deps/npm/node_modules/@npmcli/git/lib/find.js b/deps/npm/node_modules/@npmcli/git/lib/find.js
index d58f01dbcc16ff..34bd310b88e5d5 100644
--- a/deps/npm/node_modules/@npmcli/git/lib/find.js
+++ b/deps/npm/node_modules/@npmcli/git/lib/find.js
@@ -1,15 +1,15 @@
const is = require('./is.js')
const { dirname } = require('path')
-module.exports = async ({ cwd = process.cwd() } = {}) => {
- if (await is({ cwd })) {
- return cwd
- }
- while (cwd !== dirname(cwd)) {
- cwd = dirname(cwd)
+module.exports = async ({ cwd = process.cwd(), root } = {}) => {
+ while (true) {
if (await is({ cwd })) {
return cwd
}
+ const next = dirname(cwd)
+ if (cwd === root || cwd === next) {
+ return null
+ }
+ cwd = next
}
- return null
}
diff --git a/deps/npm/node_modules/@npmcli/git/package.json b/deps/npm/node_modules/@npmcli/git/package.json
index 41c78dddfa3ccc..eeba1c0415788c 100644
--- a/deps/npm/node_modules/@npmcli/git/package.json
+++ b/deps/npm/node_modules/@npmcli/git/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/git",
- "version": "4.0.4",
+ "version": "4.1.0",
"main": "lib/index.js",
"files": [
"bin/",
@@ -23,8 +23,7 @@
"template-oss-apply": "template-oss-apply --force"
},
"tap": {
- "check-coverage": true,
- "coverage-map": "map.js",
+ "timeout": 600,
"nyc-arg": [
"--exclude",
"tap-snapshots/**"
@@ -32,7 +31,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
- "@npmcli/template-oss": "4.12.0",
+ "@npmcli/template-oss": "4.15.1",
"npm-package-arg": "^10.0.0",
"slash": "^3.0.0",
"tap": "^16.0.1"
@@ -52,7 +51,7 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "windowsCI": false,
- "version": "4.12.0"
+ "version": "4.15.1",
+ "publish": true
}
}
diff --git a/deps/npm/node_modules/@npmcli/package-json/lib/index.js b/deps/npm/node_modules/@npmcli/package-json/lib/index.js
index 34e415b45d49fe..756837cdde58a0 100644
--- a/deps/npm/node_modules/@npmcli/package-json/lib/index.js
+++ b/deps/npm/node_modules/@npmcli/package-json/lib/index.js
@@ -38,6 +38,7 @@ class PackageJson {
'_attributes',
'bundledDependencies',
'bundleDependencies',
+ 'bundleDependenciesDeleteFalse',
'gypfile',
'serverjs',
'scriptpath',
diff --git a/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js b/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js
index bc101cd4fde1b0..9594ef3d7ff4ff 100644
--- a/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js
+++ b/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js
@@ -3,10 +3,13 @@ const { glob } = require('glob')
const normalizePackageBin = require('npm-normalize-package-bin')
const normalizePackageData = require('normalize-package-data')
const path = require('path')
+const log = require('proc-log')
+const git = require('@npmcli/git')
-const normalize = async (pkg, { strict, steps }) => {
+const normalize = async (pkg, { strict, steps, root }) => {
const data = pkg.content
const scripts = data.scripts || {}
+ const pkgId = `${data.name ?? ''}@${data.version ?? ''}`
// remove attributes that start with "_"
if (steps.includes('_attributes')) {
@@ -20,7 +23,7 @@ const normalize = async (pkg, { strict, steps }) => {
// build the "_id" attribute
if (steps.includes('_id')) {
if (data.name && data.version) {
- data._id = `${data.name}@${data.version}`
+ data._id = pkgId
}
}
@@ -34,7 +37,9 @@ const normalize = async (pkg, { strict, steps }) => {
// expand "bundleDependencies: true or translate from object"
if (steps.includes('bundleDependencies')) {
const bd = data.bundleDependencies
- if (bd === true) {
+ if (bd === false && !steps.includes('bundleDependenciesDeleteFalse')) {
+ data.bundleDependencies = []
+ } else if (bd === true) {
data.bundleDependencies = Object.keys(data.dependencies || {})
} else if (bd && typeof bd === 'object') {
if (!Array.isArray(bd)) {
@@ -158,7 +163,7 @@ const normalize = async (pkg, { strict, steps }) => {
}
// expand "directories.bin"
- if (steps.includes('binDir') && data.directories?.bin) {
+ if (steps.includes('binDir') && data.directories?.bin && !data.bin) {
const binsDir = path.resolve(pkg.path, path.join('.', path.join('/', data.directories.bin)))
const bins = await glob('**', { cwd: binsDir })
data.bin = bins.reduce((acc, binFile) => {
@@ -174,17 +179,20 @@ const normalize = async (pkg, { strict, steps }) => {
// populate "gitHead" attribute
if (steps.includes('gitHead') && !data.gitHead) {
+ const gitRoot = await git.find({ cwd: pkg.path, root })
let head
- try {
- head = await fs.readFile(path.resolve(pkg.path, '.git/HEAD'), 'utf8')
- } catch (err) {
+ if (gitRoot) {
+ try {
+ head = await fs.readFile(path.resolve(gitRoot, '.git/HEAD'), 'utf8')
+ } catch (err) {
// do nothing
+ }
}
let headData
if (head) {
if (head.startsWith('ref: ')) {
const headRef = head.replace(/^ref: /, '').trim()
- const headFile = path.resolve(pkg.path, '.git', headRef)
+ const headFile = path.resolve(gitRoot, '.git', headRef)
try {
headData = await fs.readFile(headFile, 'utf8')
headData = headData.replace(/^ref: /, '').trim()
@@ -192,7 +200,7 @@ const normalize = async (pkg, { strict, steps }) => {
// do nothing
}
if (!headData) {
- const packFile = path.resolve(pkg.path, '.git/packed-refs')
+ const packFile = path.resolve(gitRoot, '.git/packed-refs')
try {
let refs = await fs.readFile(packFile, 'utf8')
if (refs) {
@@ -271,11 +279,11 @@ const normalize = async (pkg, { strict, steps }) => {
// in normalize-package-data if it had access to the file path.
if (steps.includes('binRefs') && data.bin instanceof Object) {
for (const key in data.bin) {
- const binPath = path.resolve(pkg.path, data.bin[key])
try {
- await fs.access(binPath)
+ await fs.access(path.resolve(pkg.path, data.bin[key]))
} catch {
- delete data.bin[key]
+ log.warn('package-json', pkgId, `No bin file found at ${data.bin[key]}`)
+ // XXX: should a future breaking change delete bin entries that cannot be accessed?
}
}
}
diff --git a/deps/npm/node_modules/@npmcli/package-json/package.json b/deps/npm/node_modules/@npmcli/package-json/package.json
index 61607c5bb6ae70..a4e2cbab4c0bdd 100644
--- a/deps/npm/node_modules/@npmcli/package-json/package.json
+++ b/deps/npm/node_modules/@npmcli/package-json/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/package-json",
- "version": "3.1.0",
+ "version": "3.1.1",
"description": "Programmatic API to update package.json",
"main": "lib/index.js",
"files": [
@@ -26,13 +26,17 @@
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/template-oss": "4.15.1",
+ "read-package-json": "^6.0.4",
+ "read-package-json-fast": "^3.0.2",
"tap": "^16.0.1"
},
"dependencies": {
+ "@npmcli/git": "^4.1.0",
"glob": "^10.2.2",
"json-parse-even-better-errors": "^3.0.0",
"normalize-package-data": "^5.0.0",
- "npm-normalize-package-bin": "^3.0.1"
+ "npm-normalize-package-bin": "^3.0.1",
+ "proc-log": "^3.0.0"
},
"repository": {
"type": "git",
diff --git a/deps/npm/node_modules/chalk/license b/deps/npm/node_modules/chalk/license
index e7af2f77107d73..fa7ceba3eb4a96 100644
--- a/deps/npm/node_modules/chalk/license
+++ b/deps/npm/node_modules/chalk/license
@@ -1,6 +1,6 @@
MIT License
-Copyright (c) Sindre Sorhus (sindresorhus.com)
+Copyright (c) Sindre Sorhus (https://sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
diff --git a/deps/npm/node_modules/chalk/package.json b/deps/npm/node_modules/chalk/package.json
index 47c23f29068caa..ddcf7589e9797d 100644
--- a/deps/npm/node_modules/chalk/package.json
+++ b/deps/npm/node_modules/chalk/package.json
@@ -1,21 +1,31 @@
{
"name": "chalk",
- "version": "4.1.2",
+ "version": "5.2.0",
"description": "Terminal string styling done right",
"license": "MIT",
"repository": "chalk/chalk",
"funding": "https://github.com/chalk/chalk?sponsor=1",
- "main": "source",
+ "type": "module",
+ "main": "./source/index.js",
+ "exports": "./source/index.js",
+ "imports": {
+ "#ansi-styles": "./source/vendor/ansi-styles/index.js",
+ "#supports-color": {
+ "node": "./source/vendor/supports-color/index.js",
+ "default": "./source/vendor/supports-color/browser.js"
+ }
+ },
+ "types": "./source/index.d.ts",
"engines": {
- "node": ">=10"
+ "node": "^12.17.0 || ^14.13 || >=16.0.0"
},
"scripts": {
- "test": "xo && nyc ava && tsd",
+ "test": "xo && c8 ava && tsd",
"bench": "matcha benchmark.js"
},
"files": [
"source",
- "index.d.ts"
+ "!source/index.test-d.ts"
],
"keywords": [
"color",
@@ -25,7 +35,6 @@
"console",
"cli",
"string",
- "str",
"ansi",
"style",
"styles",
@@ -40,29 +49,33 @@
"command-line",
"text"
],
- "dependencies": {
- "ansi-styles": "^4.1.0",
- "supports-color": "^7.1.0"
- },
"devDependencies": {
- "ava": "^2.4.0",
- "coveralls": "^3.0.7",
- "execa": "^4.0.0",
- "import-fresh": "^3.1.0",
+ "@types/node": "^16.11.10",
+ "ava": "^3.15.0",
+ "c8": "^7.10.0",
+ "color-convert": "^2.0.1",
+ "execa": "^6.0.0",
+ "log-update": "^5.0.0",
"matcha": "^0.7.0",
- "nyc": "^15.0.0",
- "resolve-from": "^5.0.0",
- "tsd": "^0.7.4",
- "xo": "^0.28.2"
+ "tsd": "^0.19.0",
+ "xo": "^0.53.0",
+ "yoctodelay": "^2.0.0"
},
"xo": {
"rules": {
"unicorn/prefer-string-slice": "off",
- "unicorn/prefer-includes": "off",
- "@typescript-eslint/member-ordering": "off",
- "no-redeclare": "off",
- "unicorn/string-content": "off",
- "unicorn/better-regex": "off"
+ "@typescript-eslint/consistent-type-imports": "off",
+ "@typescript-eslint/consistent-type-exports": "off",
+ "@typescript-eslint/consistent-type-definitions": "off"
}
+ },
+ "c8": {
+ "reporter": [
+ "text",
+ "lcov"
+ ],
+ "exclude": [
+ "source/vendor"
+ ]
}
}
diff --git a/deps/npm/node_modules/chalk/source/index.js b/deps/npm/node_modules/chalk/source/index.js
index 75ec66350527a8..8bc993da5d6229 100644
--- a/deps/npm/node_modules/chalk/source/index.js
+++ b/deps/npm/node_modules/chalk/source/index.js
@@ -1,19 +1,22 @@
-'use strict';
-const ansiStyles = require('ansi-styles');
-const {stdout: stdoutColor, stderr: stderrColor} = require('supports-color');
-const {
+import ansiStyles from '#ansi-styles';
+import supportsColor from '#supports-color';
+import { // eslint-disable-line import/order
stringReplaceAll,
- stringEncaseCRLFWithFirstIndex
-} = require('./util');
+ stringEncaseCRLFWithFirstIndex,
+} from './utilities.js';
-const {isArray} = Array;
+const {stdout: stdoutColor, stderr: stderrColor} = supportsColor;
+
+const GENERATOR = Symbol('GENERATOR');
+const STYLER = Symbol('STYLER');
+const IS_EMPTY = Symbol('IS_EMPTY');
// `supportsColor.level` → `ansiStyles.color[name]` mapping
const levelMapping = [
'ansi',
'ansi',
'ansi256',
- 'ansi16m'
+ 'ansi16m',
];
const styles = Object.create(null);
@@ -28,7 +31,7 @@ const applyOptions = (object, options = {}) => {
object.level = options.level === undefined ? colorLevel : options.level;
};
-class ChalkClass {
+export class Chalk {
constructor(options) {
// eslint-disable-next-line no-constructor-return
return chalkFactory(options);
@@ -36,69 +39,80 @@ class ChalkClass {
}
const chalkFactory = options => {
- const chalk = {};
+ const chalk = (...strings) => strings.join(' ');
applyOptions(chalk, options);
- chalk.template = (...arguments_) => chalkTag(chalk.template, ...arguments_);
-
- Object.setPrototypeOf(chalk, Chalk.prototype);
- Object.setPrototypeOf(chalk.template, chalk);
-
- chalk.template.constructor = () => {
- throw new Error('`chalk.constructor()` is deprecated. Use `new chalk.Instance()` instead.');
- };
-
- chalk.template.Instance = ChalkClass;
+ Object.setPrototypeOf(chalk, createChalk.prototype);
- return chalk.template;
+ return chalk;
};
-function Chalk(options) {
+function createChalk(options) {
return chalkFactory(options);
}
+Object.setPrototypeOf(createChalk.prototype, Function.prototype);
+
for (const [styleName, style] of Object.entries(ansiStyles)) {
styles[styleName] = {
get() {
- const builder = createBuilder(this, createStyler(style.open, style.close, this._styler), this._isEmpty);
+ const builder = createBuilder(this, createStyler(style.open, style.close, this[STYLER]), this[IS_EMPTY]);
Object.defineProperty(this, styleName, {value: builder});
return builder;
- }
+ },
};
}
styles.visible = {
get() {
- const builder = createBuilder(this, this._styler, true);
+ const builder = createBuilder(this, this[STYLER], true);
Object.defineProperty(this, 'visible', {value: builder});
return builder;
+ },
+};
+
+const getModelAnsi = (model, level, type, ...arguments_) => {
+ if (model === 'rgb') {
+ if (level === 'ansi16m') {
+ return ansiStyles[type].ansi16m(...arguments_);
+ }
+
+ if (level === 'ansi256') {
+ return ansiStyles[type].ansi256(ansiStyles.rgbToAnsi256(...arguments_));
+ }
+
+ return ansiStyles[type].ansi(ansiStyles.rgbToAnsi(...arguments_));
}
+
+ if (model === 'hex') {
+ return getModelAnsi('rgb', level, type, ...ansiStyles.hexToRgb(...arguments_));
+ }
+
+ return ansiStyles[type][model](...arguments_);
};
-const usedModels = ['rgb', 'hex', 'keyword', 'hsl', 'hsv', 'hwb', 'ansi', 'ansi256'];
+const usedModels = ['rgb', 'hex', 'ansi256'];
for (const model of usedModels) {
styles[model] = {
get() {
const {level} = this;
return function (...arguments_) {
- const styler = createStyler(ansiStyles.color[levelMapping[level]][model](...arguments_), ansiStyles.color.close, this._styler);
- return createBuilder(this, styler, this._isEmpty);
+ const styler = createStyler(getModelAnsi(model, levelMapping[level], 'color', ...arguments_), ansiStyles.color.close, this[STYLER]);
+ return createBuilder(this, styler, this[IS_EMPTY]);
};
- }
+ },
};
-}
-for (const model of usedModels) {
const bgModel = 'bg' + model[0].toUpperCase() + model.slice(1);
styles[bgModel] = {
get() {
const {level} = this;
return function (...arguments_) {
- const styler = createStyler(ansiStyles.bgColor[levelMapping[level]][model](...arguments_), ansiStyles.bgColor.close, this._styler);
- return createBuilder(this, styler, this._isEmpty);
+ const styler = createStyler(getModelAnsi(model, levelMapping[level], 'bgColor', ...arguments_), ansiStyles.bgColor.close, this[STYLER]);
+ return createBuilder(this, styler, this[IS_EMPTY]);
};
- }
+ },
};
}
@@ -107,12 +121,12 @@ const proto = Object.defineProperties(() => {}, {
level: {
enumerable: true,
get() {
- return this._generator.level;
+ return this[GENERATOR].level;
},
set(level) {
- this._generator.level = level;
- }
- }
+ this[GENERATOR].level = level;
+ },
+ },
});
const createStyler = (open, close, parent) => {
@@ -131,46 +145,39 @@ const createStyler = (open, close, parent) => {
close,
openAll,
closeAll,
- parent
+ parent,
};
};
const createBuilder = (self, _styler, _isEmpty) => {
- const builder = (...arguments_) => {
- if (isArray(arguments_[0]) && isArray(arguments_[0].raw)) {
- // Called as a template literal, for example: chalk.red`2 + 3 = {bold ${2+3}}`
- return applyStyle(builder, chalkTag(builder, ...arguments_));
- }
-
- // Single argument is hot path, implicit coercion is faster than anything
- // eslint-disable-next-line no-implicit-coercion
- return applyStyle(builder, (arguments_.length === 1) ? ('' + arguments_[0]) : arguments_.join(' '));
- };
+ // Single argument is hot path, implicit coercion is faster than anything
+ // eslint-disable-next-line no-implicit-coercion
+ const builder = (...arguments_) => applyStyle(builder, (arguments_.length === 1) ? ('' + arguments_[0]) : arguments_.join(' '));
// We alter the prototype because we must return a function, but there is
// no way to create a function with a different prototype
Object.setPrototypeOf(builder, proto);
- builder._generator = self;
- builder._styler = _styler;
- builder._isEmpty = _isEmpty;
+ builder[GENERATOR] = self;
+ builder[STYLER] = _styler;
+ builder[IS_EMPTY] = _isEmpty;
return builder;
};
const applyStyle = (self, string) => {
if (self.level <= 0 || !string) {
- return self._isEmpty ? '' : string;
+ return self[IS_EMPTY] ? '' : string;
}
- let styler = self._styler;
+ let styler = self[STYLER];
if (styler === undefined) {
return string;
}
const {openAll, closeAll} = styler;
- if (string.indexOf('\u001B') !== -1) {
+ if (string.includes('\u001B')) {
while (styler !== undefined) {
// Replace any instances already present with a re-opening code
// otherwise only the part of the string until said closing code
@@ -192,38 +199,27 @@ const applyStyle = (self, string) => {
return openAll + string + closeAll;
};
-let template;
-const chalkTag = (chalk, ...strings) => {
- const [firstString] = strings;
-
- if (!isArray(firstString) || !isArray(firstString.raw)) {
- // If chalk() was called by itself or with a string,
- // return the string itself as a string.
- return strings.join(' ');
- }
+Object.defineProperties(createChalk.prototype, styles);
- const arguments_ = strings.slice(1);
- const parts = [firstString.raw[0]];
+const chalk = createChalk();
+export const chalkStderr = createChalk({level: stderrColor ? stderrColor.level : 0});
- for (let i = 1; i < firstString.length; i++) {
- parts.push(
- String(arguments_[i - 1]).replace(/[{}\\]/g, '\\$&'),
- String(firstString.raw[i])
- );
- }
+export {
+ modifierNames,
+ foregroundColorNames,
+ backgroundColorNames,
+ colorNames,
- if (template === undefined) {
- template = require('./templates');
- }
+ // TODO: Remove these aliases in the next major version
+ modifierNames as modifiers,
+ foregroundColorNames as foregroundColors,
+ backgroundColorNames as backgroundColors,
+ colorNames as colors,
+} from './vendor/ansi-styles/index.js';
- return template(chalk, parts.join(''));
+export {
+ stdoutColor as supportsColor,
+ stderrColor as supportsColorStderr,
};
-Object.defineProperties(Chalk.prototype, styles);
-
-const chalk = Chalk(); // eslint-disable-line new-cap
-chalk.supportsColor = stdoutColor;
-chalk.stderr = Chalk({level: stderrColor ? stderrColor.level : 0}); // eslint-disable-line new-cap
-chalk.stderr.supportsColor = stderrColor;
-
-module.exports = chalk;
+export default chalk;
diff --git a/deps/npm/node_modules/chalk/source/templates.js b/deps/npm/node_modules/chalk/source/templates.js
deleted file mode 100644
index b130949d646fda..00000000000000
--- a/deps/npm/node_modules/chalk/source/templates.js
+++ /dev/null
@@ -1,134 +0,0 @@
-'use strict';
-const TEMPLATE_REGEX = /(?:\\(u(?:[a-f\d]{4}|\{[a-f\d]{1,6}\})|x[a-f\d]{2}|.))|(?:\{(~)?(\w+(?:\([^)]*\))?(?:\.\w+(?:\([^)]*\))?)*)(?:[ \t]|(?=\r?\n)))|(\})|((?:.|[\r\n\f])+?)/gi;
-const STYLE_REGEX = /(?:^|\.)(\w+)(?:\(([^)]*)\))?/g;
-const STRING_REGEX = /^(['"])((?:\\.|(?!\1)[^\\])*)\1$/;
-const ESCAPE_REGEX = /\\(u(?:[a-f\d]{4}|{[a-f\d]{1,6}})|x[a-f\d]{2}|.)|([^\\])/gi;
-
-const ESCAPES = new Map([
- ['n', '\n'],
- ['r', '\r'],
- ['t', '\t'],
- ['b', '\b'],
- ['f', '\f'],
- ['v', '\v'],
- ['0', '\0'],
- ['\\', '\\'],
- ['e', '\u001B'],
- ['a', '\u0007']
-]);
-
-function unescape(c) {
- const u = c[0] === 'u';
- const bracket = c[1] === '{';
-
- if ((u && !bracket && c.length === 5) || (c[0] === 'x' && c.length === 3)) {
- return String.fromCharCode(parseInt(c.slice(1), 16));
- }
-
- if (u && bracket) {
- return String.fromCodePoint(parseInt(c.slice(2, -1), 16));
- }
-
- return ESCAPES.get(c) || c;
-}
-
-function parseArguments(name, arguments_) {
- const results = [];
- const chunks = arguments_.trim().split(/\s*,\s*/g);
- let matches;
-
- for (const chunk of chunks) {
- const number = Number(chunk);
- if (!Number.isNaN(number)) {
- results.push(number);
- } else if ((matches = chunk.match(STRING_REGEX))) {
- results.push(matches[2].replace(ESCAPE_REGEX, (m, escape, character) => escape ? unescape(escape) : character));
- } else {
- throw new Error(`Invalid Chalk template style argument: ${chunk} (in style '${name}')`);
- }
- }
-
- return results;
-}
-
-function parseStyle(style) {
- STYLE_REGEX.lastIndex = 0;
-
- const results = [];
- let matches;
-
- while ((matches = STYLE_REGEX.exec(style)) !== null) {
- const name = matches[1];
-
- if (matches[2]) {
- const args = parseArguments(name, matches[2]);
- results.push([name].concat(args));
- } else {
- results.push([name]);
- }
- }
-
- return results;
-}
-
-function buildStyle(chalk, styles) {
- const enabled = {};
-
- for (const layer of styles) {
- for (const style of layer.styles) {
- enabled[style[0]] = layer.inverse ? null : style.slice(1);
- }
- }
-
- let current = chalk;
- for (const [styleName, styles] of Object.entries(enabled)) {
- if (!Array.isArray(styles)) {
- continue;
- }
-
- if (!(styleName in current)) {
- throw new Error(`Unknown Chalk style: ${styleName}`);
- }
-
- current = styles.length > 0 ? current[styleName](...styles) : current[styleName];
- }
-
- return current;
-}
-
-module.exports = (chalk, temporary) => {
- const styles = [];
- const chunks = [];
- let chunk = [];
-
- // eslint-disable-next-line max-params
- temporary.replace(TEMPLATE_REGEX, (m, escapeCharacter, inverse, style, close, character) => {
- if (escapeCharacter) {
- chunk.push(unescape(escapeCharacter));
- } else if (style) {
- const string = chunk.join('');
- chunk = [];
- chunks.push(styles.length === 0 ? string : buildStyle(chalk, styles)(string));
- styles.push({inverse, styles: parseStyle(style)});
- } else if (close) {
- if (styles.length === 0) {
- throw new Error('Found extraneous } in Chalk template literal');
- }
-
- chunks.push(buildStyle(chalk, styles)(chunk.join('')));
- chunk = [];
- styles.pop();
- } else {
- chunk.push(character);
- }
- });
-
- chunks.push(chunk.join(''));
-
- if (styles.length > 0) {
- const errMessage = `Chalk template literal is missing ${styles.length} closing bracket${styles.length === 1 ? '' : 's'} (\`}\`)`;
- throw new Error(errMessage);
- }
-
- return chunks.join('');
-};
diff --git a/deps/npm/node_modules/chalk/source/util.js b/deps/npm/node_modules/chalk/source/util.js
deleted file mode 100644
index ca466fd466c07f..00000000000000
--- a/deps/npm/node_modules/chalk/source/util.js
+++ /dev/null
@@ -1,39 +0,0 @@
-'use strict';
-
-const stringReplaceAll = (string, substring, replacer) => {
- let index = string.indexOf(substring);
- if (index === -1) {
- return string;
- }
-
- const substringLength = substring.length;
- let endIndex = 0;
- let returnValue = '';
- do {
- returnValue += string.substr(endIndex, index - endIndex) + substring + replacer;
- endIndex = index + substringLength;
- index = string.indexOf(substring, endIndex);
- } while (index !== -1);
-
- returnValue += string.substr(endIndex);
- return returnValue;
-};
-
-const stringEncaseCRLFWithFirstIndex = (string, prefix, postfix, index) => {
- let endIndex = 0;
- let returnValue = '';
- do {
- const gotCR = string[index - 1] === '\r';
- returnValue += string.substr(endIndex, (gotCR ? index - 1 : index) - endIndex) + prefix + (gotCR ? '\r\n' : '\n') + postfix;
- endIndex = index + 1;
- index = string.indexOf('\n', endIndex);
- } while (index !== -1);
-
- returnValue += string.substr(endIndex);
- return returnValue;
-};
-
-module.exports = {
- stringReplaceAll,
- stringEncaseCRLFWithFirstIndex
-};
diff --git a/deps/npm/node_modules/chalk/source/utilities.js b/deps/npm/node_modules/chalk/source/utilities.js
new file mode 100644
index 00000000000000..4366dee0d84d72
--- /dev/null
+++ b/deps/npm/node_modules/chalk/source/utilities.js
@@ -0,0 +1,33 @@
+// TODO: When targeting Node.js 16, use `String.prototype.replaceAll`.
+export function stringReplaceAll(string, substring, replacer) {
+ let index = string.indexOf(substring);
+ if (index === -1) {
+ return string;
+ }
+
+ const substringLength = substring.length;
+ let endIndex = 0;
+ let returnValue = '';
+ do {
+ returnValue += string.slice(endIndex, index) + substring + replacer;
+ endIndex = index + substringLength;
+ index = string.indexOf(substring, endIndex);
+ } while (index !== -1);
+
+ returnValue += string.slice(endIndex);
+ return returnValue;
+}
+
+export function stringEncaseCRLFWithFirstIndex(string, prefix, postfix, index) {
+ let endIndex = 0;
+ let returnValue = '';
+ do {
+ const gotCR = string[index - 1] === '\r';
+ returnValue += string.slice(endIndex, (gotCR ? index - 1 : index)) + prefix + (gotCR ? '\r\n' : '\n') + postfix;
+ endIndex = index + 1;
+ index = string.indexOf('\n', endIndex);
+ } while (index !== -1);
+
+ returnValue += string.slice(endIndex);
+ return returnValue;
+}
diff --git a/deps/npm/node_modules/chalk/source/vendor/ansi-styles/index.js b/deps/npm/node_modules/chalk/source/vendor/ansi-styles/index.js
new file mode 100644
index 00000000000000..eaa7bed6cb1ed9
--- /dev/null
+++ b/deps/npm/node_modules/chalk/source/vendor/ansi-styles/index.js
@@ -0,0 +1,223 @@
+const ANSI_BACKGROUND_OFFSET = 10;
+
+const wrapAnsi16 = (offset = 0) => code => `\u001B[${code + offset}m`;
+
+const wrapAnsi256 = (offset = 0) => code => `\u001B[${38 + offset};5;${code}m`;
+
+const wrapAnsi16m = (offset = 0) => (red, green, blue) => `\u001B[${38 + offset};2;${red};${green};${blue}m`;
+
+const styles = {
+ modifier: {
+ reset: [0, 0],
+ // 21 isn't widely supported and 22 does the same thing
+ bold: [1, 22],
+ dim: [2, 22],
+ italic: [3, 23],
+ underline: [4, 24],
+ overline: [53, 55],
+ inverse: [7, 27],
+ hidden: [8, 28],
+ strikethrough: [9, 29],
+ },
+ color: {
+ black: [30, 39],
+ red: [31, 39],
+ green: [32, 39],
+ yellow: [33, 39],
+ blue: [34, 39],
+ magenta: [35, 39],
+ cyan: [36, 39],
+ white: [37, 39],
+
+ // Bright color
+ blackBright: [90, 39],
+ gray: [90, 39], // Alias of `blackBright`
+ grey: [90, 39], // Alias of `blackBright`
+ redBright: [91, 39],
+ greenBright: [92, 39],
+ yellowBright: [93, 39],
+ blueBright: [94, 39],
+ magentaBright: [95, 39],
+ cyanBright: [96, 39],
+ whiteBright: [97, 39],
+ },
+ bgColor: {
+ bgBlack: [40, 49],
+ bgRed: [41, 49],
+ bgGreen: [42, 49],
+ bgYellow: [43, 49],
+ bgBlue: [44, 49],
+ bgMagenta: [45, 49],
+ bgCyan: [46, 49],
+ bgWhite: [47, 49],
+
+ // Bright color
+ bgBlackBright: [100, 49],
+ bgGray: [100, 49], // Alias of `bgBlackBright`
+ bgGrey: [100, 49], // Alias of `bgBlackBright`
+ bgRedBright: [101, 49],
+ bgGreenBright: [102, 49],
+ bgYellowBright: [103, 49],
+ bgBlueBright: [104, 49],
+ bgMagentaBright: [105, 49],
+ bgCyanBright: [106, 49],
+ bgWhiteBright: [107, 49],
+ },
+};
+
+export const modifierNames = Object.keys(styles.modifier);
+export const foregroundColorNames = Object.keys(styles.color);
+export const backgroundColorNames = Object.keys(styles.bgColor);
+export const colorNames = [...foregroundColorNames, ...backgroundColorNames];
+
+function assembleStyles() {
+ const codes = new Map();
+
+ for (const [groupName, group] of Object.entries(styles)) {
+ for (const [styleName, style] of Object.entries(group)) {
+ styles[styleName] = {
+ open: `\u001B[${style[0]}m`,
+ close: `\u001B[${style[1]}m`,
+ };
+
+ group[styleName] = styles[styleName];
+
+ codes.set(style[0], style[1]);
+ }
+
+ Object.defineProperty(styles, groupName, {
+ value: group,
+ enumerable: false,
+ });
+ }
+
+ Object.defineProperty(styles, 'codes', {
+ value: codes,
+ enumerable: false,
+ });
+
+ styles.color.close = '\u001B[39m';
+ styles.bgColor.close = '\u001B[49m';
+
+ styles.color.ansi = wrapAnsi16();
+ styles.color.ansi256 = wrapAnsi256();
+ styles.color.ansi16m = wrapAnsi16m();
+ styles.bgColor.ansi = wrapAnsi16(ANSI_BACKGROUND_OFFSET);
+ styles.bgColor.ansi256 = wrapAnsi256(ANSI_BACKGROUND_OFFSET);
+ styles.bgColor.ansi16m = wrapAnsi16m(ANSI_BACKGROUND_OFFSET);
+
+ // From https://github.com/Qix-/color-convert/blob/3f0e0d4e92e235796ccb17f6e85c72094a651f49/conversions.js
+ Object.defineProperties(styles, {
+ rgbToAnsi256: {
+ value(red, green, blue) {
+ // We use the extended greyscale palette here, with the exception of
+ // black and white. normal palette only has 4 greyscale shades.
+ if (red === green && green === blue) {
+ if (red < 8) {
+ return 16;
+ }
+
+ if (red > 248) {
+ return 231;
+ }
+
+ return Math.round(((red - 8) / 247) * 24) + 232;
+ }
+
+ return 16
+ + (36 * Math.round(red / 255 * 5))
+ + (6 * Math.round(green / 255 * 5))
+ + Math.round(blue / 255 * 5);
+ },
+ enumerable: false,
+ },
+ hexToRgb: {
+ value(hex) {
+ const matches = /[a-f\d]{6}|[a-f\d]{3}/i.exec(hex.toString(16));
+ if (!matches) {
+ return [0, 0, 0];
+ }
+
+ let [colorString] = matches;
+
+ if (colorString.length === 3) {
+ colorString = [...colorString].map(character => character + character).join('');
+ }
+
+ const integer = Number.parseInt(colorString, 16);
+
+ return [
+ /* eslint-disable no-bitwise */
+ (integer >> 16) & 0xFF,
+ (integer >> 8) & 0xFF,
+ integer & 0xFF,
+ /* eslint-enable no-bitwise */
+ ];
+ },
+ enumerable: false,
+ },
+ hexToAnsi256: {
+ value: hex => styles.rgbToAnsi256(...styles.hexToRgb(hex)),
+ enumerable: false,
+ },
+ ansi256ToAnsi: {
+ value(code) {
+ if (code < 8) {
+ return 30 + code;
+ }
+
+ if (code < 16) {
+ return 90 + (code - 8);
+ }
+
+ let red;
+ let green;
+ let blue;
+
+ if (code >= 232) {
+ red = (((code - 232) * 10) + 8) / 255;
+ green = red;
+ blue = red;
+ } else {
+ code -= 16;
+
+ const remainder = code % 36;
+
+ red = Math.floor(code / 36) / 5;
+ green = Math.floor(remainder / 6) / 5;
+ blue = (remainder % 6) / 5;
+ }
+
+ const value = Math.max(red, green, blue) * 2;
+
+ if (value === 0) {
+ return 30;
+ }
+
+ // eslint-disable-next-line no-bitwise
+ let result = 30 + ((Math.round(blue) << 2) | (Math.round(green) << 1) | Math.round(red));
+
+ if (value === 2) {
+ result += 60;
+ }
+
+ return result;
+ },
+ enumerable: false,
+ },
+ rgbToAnsi: {
+ value: (red, green, blue) => styles.ansi256ToAnsi(styles.rgbToAnsi256(red, green, blue)),
+ enumerable: false,
+ },
+ hexToAnsi: {
+ value: hex => styles.ansi256ToAnsi(styles.hexToAnsi256(hex)),
+ enumerable: false,
+ },
+ });
+
+ return styles;
+}
+
+const ansiStyles = assembleStyles();
+
+export default ansiStyles;
diff --git a/deps/npm/node_modules/chalk/source/vendor/supports-color/browser.js b/deps/npm/node_modules/chalk/source/vendor/supports-color/browser.js
new file mode 100644
index 00000000000000..9fa6888f10288e
--- /dev/null
+++ b/deps/npm/node_modules/chalk/source/vendor/supports-color/browser.js
@@ -0,0 +1,30 @@
+/* eslint-env browser */
+
+const level = (() => {
+ if (navigator.userAgentData) {
+ const brand = navigator.userAgentData.brands.find(({brand}) => brand === 'Chromium');
+ if (brand && brand.version > 93) {
+ return 3;
+ }
+ }
+
+ if (/\b(Chrome|Chromium)\//.test(navigator.userAgent)) {
+ return 1;
+ }
+
+ return 0;
+})();
+
+const colorSupport = level !== 0 && {
+ level,
+ hasBasic: true,
+ has256: level >= 2,
+ has16m: level >= 3,
+};
+
+const supportsColor = {
+ stdout: colorSupport,
+ stderr: colorSupport,
+};
+
+export default supportsColor;
diff --git a/deps/npm/node_modules/chalk/source/vendor/supports-color/index.js b/deps/npm/node_modules/chalk/source/vendor/supports-color/index.js
new file mode 100644
index 00000000000000..a7cea61e9eb5fd
--- /dev/null
+++ b/deps/npm/node_modules/chalk/source/vendor/supports-color/index.js
@@ -0,0 +1,181 @@
+import process from 'node:process';
+import os from 'node:os';
+import tty from 'node:tty';
+
+// From: https://github.com/sindresorhus/has-flag/blob/main/index.js
+function hasFlag(flag, argv = globalThis.Deno ? globalThis.Deno.args : process.argv) {
+ const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--');
+ const position = argv.indexOf(prefix + flag);
+ const terminatorPosition = argv.indexOf('--');
+ return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition);
+}
+
+const {env} = process;
+
+let flagForceColor;
+if (
+ hasFlag('no-color')
+ || hasFlag('no-colors')
+ || hasFlag('color=false')
+ || hasFlag('color=never')
+) {
+ flagForceColor = 0;
+} else if (
+ hasFlag('color')
+ || hasFlag('colors')
+ || hasFlag('color=true')
+ || hasFlag('color=always')
+) {
+ flagForceColor = 1;
+}
+
+function envForceColor() {
+ if ('FORCE_COLOR' in env) {
+ if (env.FORCE_COLOR === 'true') {
+ return 1;
+ }
+
+ if (env.FORCE_COLOR === 'false') {
+ return 0;
+ }
+
+ return env.FORCE_COLOR.length === 0 ? 1 : Math.min(Number.parseInt(env.FORCE_COLOR, 10), 3);
+ }
+}
+
+function translateLevel(level) {
+ if (level === 0) {
+ return false;
+ }
+
+ return {
+ level,
+ hasBasic: true,
+ has256: level >= 2,
+ has16m: level >= 3,
+ };
+}
+
+function _supportsColor(haveStream, {streamIsTTY, sniffFlags = true} = {}) {
+ const noFlagForceColor = envForceColor();
+ if (noFlagForceColor !== undefined) {
+ flagForceColor = noFlagForceColor;
+ }
+
+ const forceColor = sniffFlags ? flagForceColor : noFlagForceColor;
+
+ if (forceColor === 0) {
+ return 0;
+ }
+
+ if (sniffFlags) {
+ if (hasFlag('color=16m')
+ || hasFlag('color=full')
+ || hasFlag('color=truecolor')) {
+ return 3;
+ }
+
+ if (hasFlag('color=256')) {
+ return 2;
+ }
+ }
+
+ // Check for Azure DevOps pipelines.
+ // Has to be above the `!streamIsTTY` check.
+ if ('TF_BUILD' in env && 'AGENT_NAME' in env) {
+ return 1;
+ }
+
+ if (haveStream && !streamIsTTY && forceColor === undefined) {
+ return 0;
+ }
+
+ const min = forceColor || 0;
+
+ if (env.TERM === 'dumb') {
+ return min;
+ }
+
+ if (process.platform === 'win32') {
+ // Windows 10 build 10586 is the first Windows release that supports 256 colors.
+ // Windows 10 build 14931 is the first release that supports 16m/TrueColor.
+ const osRelease = os.release().split('.');
+ if (
+ Number(osRelease[0]) >= 10
+ && Number(osRelease[2]) >= 10_586
+ ) {
+ return Number(osRelease[2]) >= 14_931 ? 3 : 2;
+ }
+
+ return 1;
+ }
+
+ if ('CI' in env) {
+ if ('GITHUB_ACTIONS' in env) {
+ return 3;
+ }
+
+ if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'BUILDKITE', 'DRONE'].some(sign => sign in env) || env.CI_NAME === 'codeship') {
+ return 1;
+ }
+
+ return min;
+ }
+
+ if ('TEAMCITY_VERSION' in env) {
+ return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0;
+ }
+
+ if (env.COLORTERM === 'truecolor') {
+ return 3;
+ }
+
+ if (env.TERM === 'xterm-kitty') {
+ return 3;
+ }
+
+ if ('TERM_PROGRAM' in env) {
+ const version = Number.parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10);
+
+ switch (env.TERM_PROGRAM) {
+ case 'iTerm.app': {
+ return version >= 3 ? 3 : 2;
+ }
+
+ case 'Apple_Terminal': {
+ return 2;
+ }
+ // No default
+ }
+ }
+
+ if (/-256(color)?$/i.test(env.TERM)) {
+ return 2;
+ }
+
+ if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) {
+ return 1;
+ }
+
+ if ('COLORTERM' in env) {
+ return 1;
+ }
+
+ return min;
+}
+
+export function createSupportsColor(stream, options = {}) {
+ const level = _supportsColor(stream, {
+ streamIsTTY: stream && stream.isTTY,
+ ...options,
+ });
+
+ return translateLevel(level);
+}
+
+const supportsColor = {
+ stdout: createSupportsColor({isTTY: tty.isatty(1)}),
+ stderr: createSupportsColor({isTTY: tty.isatty(2)}),
+};
+
+export default supportsColor;
diff --git a/deps/npm/node_modules/has-flag/index.js b/deps/npm/node_modules/has-flag/index.js
deleted file mode 100644
index b6f80b1f8ffd76..00000000000000
--- a/deps/npm/node_modules/has-flag/index.js
+++ /dev/null
@@ -1,8 +0,0 @@
-'use strict';
-
-module.exports = (flag, argv = process.argv) => {
- const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--');
- const position = argv.indexOf(prefix + flag);
- const terminatorPosition = argv.indexOf('--');
- return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition);
-};
diff --git a/deps/npm/node_modules/has-flag/package.json b/deps/npm/node_modules/has-flag/package.json
deleted file mode 100644
index a9cba4b856d046..00000000000000
--- a/deps/npm/node_modules/has-flag/package.json
+++ /dev/null
@@ -1,46 +0,0 @@
-{
- "name": "has-flag",
- "version": "4.0.0",
- "description": "Check if argv has a specific flag",
- "license": "MIT",
- "repository": "sindresorhus/has-flag",
- "author": {
- "name": "Sindre Sorhus",
- "email": "sindresorhus@gmail.com",
- "url": "sindresorhus.com"
- },
- "engines": {
- "node": ">=8"
- },
- "scripts": {
- "test": "xo && ava && tsd"
- },
- "files": [
- "index.js",
- "index.d.ts"
- ],
- "keywords": [
- "has",
- "check",
- "detect",
- "contains",
- "find",
- "flag",
- "cli",
- "command-line",
- "argv",
- "process",
- "arg",
- "args",
- "argument",
- "arguments",
- "getopt",
- "minimist",
- "optimist"
- ],
- "devDependencies": {
- "ava": "^1.4.1",
- "tsd": "^0.7.2",
- "xo": "^0.24.0"
- }
-}
diff --git a/deps/npm/node_modules/libnpmexec/README.md b/deps/npm/node_modules/libnpmexec/README.md
index dc79d12cffc456..fb4a1e32b18df7 100644
--- a/deps/npm/node_modules/libnpmexec/README.md
+++ b/deps/npm/node_modules/libnpmexec/README.md
@@ -31,7 +31,7 @@ await libexec({
- `call`: An alternative command to run when using `packages` option **String**, defaults to empty string.
- `cache`: The path location to where the npm cache folder is placed **String**
- `npxCache`: The path location to where the npx cache folder is placed **String**
- - `color`: Output should use color? **Boolean**, defaults to `false`
+ - `chalk`: Chalk instance to use for output colors. **Required**
- `localBin`: Location to the `node_modules/.bin` folder of the local project to start scanning for bin files **String**, defaults to `./node_modules/.bin`. **libexec** will walk up the directory structure looking for `node_modules/.bin` folders in parent folders that might satisfy the current `arg` and will use that bin if found.
- `locationMsg`: Overrides "at location" message when entering interactive mode **String**
- `globalBin`: Location to the global space bin folder, same as: `$(npm bin -g)` **String**, defaults to empty string.
diff --git a/deps/npm/node_modules/libnpmexec/lib/run-script.js b/deps/npm/node_modules/libnpmexec/lib/run-script.js
index ba60395468d626..89dcf2e653036e 100644
--- a/deps/npm/node_modules/libnpmexec/lib/run-script.js
+++ b/deps/npm/node_modules/libnpmexec/lib/run-script.js
@@ -1,4 +1,3 @@
-const chalk = require('chalk')
const ciInfo = require('ci-info')
const runScript = require('@npmcli/run-script')
const readPackageJson = require('read-package-json-fast')
@@ -6,12 +5,6 @@ const npmlog = require('npmlog')
const log = require('proc-log')
const noTTY = require('./no-tty.js')
-const nocolor = {
- reset: s => s,
- bold: s => s,
- dim: s => s,
-}
-
const run = async ({
args,
call,
@@ -25,8 +18,6 @@ const run = async ({
}) => {
// turn list of args into command string
const script = call || args.shift() || scriptShell
- const color = !!flatOptions.color
- const colorize = color ? chalk : nocolor
// do the fakey runScript dance
// still should work if no package.json in cwd
@@ -49,14 +40,14 @@ const run = async ({
return log.warn('exec', 'Interactive mode disabled in CI environment')
}
- locationMsg = locationMsg || ` at location:\n${colorize.dim(runPath)}`
+ locationMsg = locationMsg || ` at location:\n${flatOptions.chalk.dim(runPath)}`
output(`${
- colorize.reset('\nEntering npm script environment')
+ flatOptions.chalk.reset('\nEntering npm script environment')
}${
- colorize.reset(locationMsg)
+ flatOptions.chalk.reset(locationMsg)
}${
- colorize.bold('\nType \'exit\' or ^D when finished\n')
+ flatOptions.chalk.bold('\nType \'exit\' or ^D when finished\n')
}`)
}
}
diff --git a/deps/npm/node_modules/libnpmexec/package.json b/deps/npm/node_modules/libnpmexec/package.json
index e47e301ab7dd77..1fa85ff033b8cb 100644
--- a/deps/npm/node_modules/libnpmexec/package.json
+++ b/deps/npm/node_modules/libnpmexec/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmexec",
- "version": "5.0.17",
+ "version": "6.0.0",
"files": [
"bin/",
"lib/"
@@ -42,7 +42,6 @@
"template-oss-apply": "template-oss-apply --force"
},
"tap": {
- "color": true,
"files": "test/*.js",
"nyc-arg": [
"--exclude",
@@ -54,6 +53,7 @@
"@npmcli/mock-registry": "^1.0.0",
"@npmcli/template-oss": "4.14.1",
"bin-links": "^4.0.1",
+ "chalk": "^5.2.0",
"just-extend": "^6.2.0",
"just-safe-set": "^4.2.1",
"minify-registry-metadata": "^3.0.0",
@@ -62,7 +62,6 @@
"dependencies": {
"@npmcli/arborist": "^6.2.9",
"@npmcli/run-script": "^6.0.0",
- "chalk": "^4.1.0",
"ci-info": "^3.7.1",
"npm-package-arg": "^10.1.0",
"npmlog": "^7.0.1",
diff --git a/deps/npm/node_modules/libnpmpublish/README.md b/deps/npm/node_modules/libnpmpublish/README.md
index 9c9c61d4b59657..90b1f7c68ab4f2 100644
--- a/deps/npm/node_modules/libnpmpublish/README.md
+++ b/deps/npm/node_modules/libnpmpublish/README.md
@@ -51,6 +51,17 @@ A couple of options of note:
token for the registry. For other ways to pass in auth details, see the
n-r-f docs.
+* `opts.provenance` - when running in a supported CI environment, will trigger
+ the generation of a signed provenance statement to be published alongside
+ the package. Mutually exclusive with the `provenanceFile` option.
+
+* `opts.provenanceFile` - specifies the path to an externally-generated
+ provenance statement to be published alongside the package. Mutually
+ exclusive with the `provenance` option. The specified file should be a
+ [Sigstore Bundle](https://github.com/sigstore/protobuf-specs/blob/main/protos/sigstore_bundle.proto)
+ containing a [DSSE](https://github.com/secure-systems-lab/dsse)-packaged
+ provenance statement.
+
#### `> libpub.publish(manifest, tarData, [opts]) -> Promise`
Sends the package represented by the `manifest` and `tarData` to the
diff --git a/deps/npm/node_modules/libnpmpublish/lib/provenance.js b/deps/npm/node_modules/libnpmpublish/lib/provenance.js
index 1eb870da5f24f7..ebe4a24475331d 100644
--- a/deps/npm/node_modules/libnpmpublish/lib/provenance.js
+++ b/deps/npm/node_modules/libnpmpublish/lib/provenance.js
@@ -1,4 +1,5 @@
const { sigstore } = require('sigstore')
+const { readFile } = require('fs/promises')
const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json'
const INTOTO_STATEMENT_TYPE = 'https://in-toto.io/Statement/v0.1'
@@ -66,6 +67,50 @@ const generateProvenance = async (subject, opts) => {
return sigstore.attest(Buffer.from(JSON.stringify(payload)), INTOTO_PAYLOAD_TYPE, opts)
}
+const verifyProvenance = async (subject, provenancePath) => {
+ let provenanceBundle
+ try {
+ provenanceBundle = JSON.parse(await readFile(provenancePath))
+ } catch (err) {
+ err.message = `Invalid provenance provided: ${err.message}`
+ throw err
+ }
+
+ const payload = extractProvenance(provenanceBundle)
+ if (!payload.subject || !payload.subject.length) {
+ throw new Error('No subject found in sigstore bundle payload')
+ }
+ if (payload.subject.length > 1) {
+ throw new Error('Found more than one subject in the sigstore bundle payload')
+ }
+
+ const bundleSubject = payload.subject[0]
+ if (subject.name !== bundleSubject.name) {
+ throw new Error(
+ `Provenance subject ${bundleSubject.name} does not match the package: ${subject.name}`
+ )
+ }
+ if (subject.digest.sha512 !== bundleSubject.digest.sha512) {
+ throw new Error('Provenance subject digest does not match the package')
+ }
+
+ await sigstore.verify(provenanceBundle)
+ return provenanceBundle
+}
+
+const extractProvenance = (bundle) => {
+ if (!bundle?.dsseEnvelope?.payload) {
+ throw new Error('No dsseEnvelope with payload found in sigstore bundle')
+ }
+ try {
+ return JSON.parse(Buffer.from(bundle.dsseEnvelope.payload, 'base64').toString('utf8'))
+ } catch (err) {
+ err.message = `Failed to parse payload from dsseEnvelope: ${err.message}`
+ throw err
+ }
+}
+
module.exports = {
generateProvenance,
+ verifyProvenance,
}
diff --git a/deps/npm/node_modules/libnpmpublish/lib/publish.js b/deps/npm/node_modules/libnpmpublish/lib/publish.js
index 79c00eb68ad0c8..3749c3cebfdc8b 100644
--- a/deps/npm/node_modules/libnpmpublish/lib/publish.js
+++ b/deps/npm/node_modules/libnpmpublish/lib/publish.js
@@ -7,7 +7,7 @@ const { URL } = require('url')
const ssri = require('ssri')
const ciInfo = require('ci-info')
-const { generateProvenance } = require('./provenance')
+const { generateProvenance, verifyProvenance } = require('./provenance')
const TLOG_BASE_URL = 'https://search.sigstore.dev/'
@@ -111,7 +111,7 @@ const patchManifest = (_manifest, opts) => {
}
const buildMetadata = async (registry, manifest, tarballData, spec, opts) => {
- const { access, defaultTag, algorithms, provenance } = opts
+ const { access, defaultTag, algorithms, provenance, provenanceFile } = opts
const root = {
_id: manifest.name,
name: manifest.name,
@@ -154,66 +154,31 @@ const buildMetadata = async (registry, manifest, tarballData, spec, opts) => {
// Handle case where --provenance flag was set to true
let transparencyLogUrl
- if (provenance === true) {
+ if (provenance === true || provenanceFile) {
+ let provenanceBundle
const subject = {
name: npa.toPurl(spec),
digest: { sha512: integrity.sha512[0].hexDigest() },
}
- // Ensure that we're running in GHA, currently the only supported build environment
- if (ciInfo.name !== 'GitHub Actions') {
- throw Object.assign(
- new Error('Automatic provenance generation not supported outside of GitHub Actions'),
- { code: 'EUSAGE' }
- )
- }
-
- // Ensure that the GHA OIDC token is available
- if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL) {
- throw Object.assign(
- /* eslint-disable-next-line max-len */
- new Error('Provenance generation in GitHub Actions requires "write" access to the "id-token" permission'),
- { code: 'EUSAGE' }
- )
- }
-
- // Some registries (e.g. GH packages) require auth to check visibility,
- // and always return 404 when no auth is supplied. In this case we assume
- // the package is always private and require `--access public` to publish
- // with provenance.
- let visibility = { public: false }
- if (opts.provenance === true && opts.access !== 'public') {
- try {
- const res = await npmFetch
- .json(`${registry}/-/package/${spec.escapedName}/visibility`, opts)
- visibility = res
- } catch (err) {
- if (err.code !== 'E404') {
- throw err
- }
+ if (provenance === true) {
+ await ensureProvenanceGeneration(registry, spec, opts)
+ provenanceBundle = await generateProvenance([subject], opts)
+
+ /* eslint-disable-next-line max-len */
+ log.notice('publish', 'Signed provenance statement with source and build information from GitHub Actions')
+
+ const tlogEntry = provenanceBundle?.verificationMaterial?.tlogEntries[0]
+ /* istanbul ignore else */
+ if (tlogEntry) {
+ transparencyLogUrl = `${TLOG_BASE_URL}?logIndex=${tlogEntry.logIndex}`
+ log.notice(
+ 'publish',
+ `Provenance statement published to transparency log: ${transparencyLogUrl}`
+ )
}
- }
-
- if (!visibility.public && opts.provenance === true && opts.access !== 'public') {
- throw Object.assign(
- /* eslint-disable-next-line max-len */
- new Error("Can't generate provenance for new or private package, you must set `access` to public."),
- { code: 'EUSAGE' }
- )
- }
- const provenanceBundle = await generateProvenance([subject], opts)
-
- /* eslint-disable-next-line max-len */
- log.notice('publish', 'Signed provenance statement with source and build information from GitHub Actions')
-
- const tlogEntry = provenanceBundle?.verificationMaterial?.tlogEntries[0]
- /* istanbul ignore else */
- if (tlogEntry) {
- transparencyLogUrl = `${TLOG_BASE_URL}?logIndex=${tlogEntry.logIndex}`
- log.notice(
- 'publish',
- `Provenance statement published to transparency log: ${transparencyLogUrl}`
- )
+ } else {
+ provenanceBundle = await verifyProvenance(subject, provenanceFile)
}
const serializedBundle = JSON.stringify(provenanceBundle)
@@ -275,4 +240,49 @@ const patchMetadata = (current, newData) => {
return current
}
+// Check that all the prereqs are met for provenance generation
+const ensureProvenanceGeneration = async (registry, spec, opts) => {
+ // Ensure that we're running in GHA, currently the only supported build environment
+ if (ciInfo.name !== 'GitHub Actions') {
+ throw Object.assign(
+ new Error('Automatic provenance generation not supported outside of GitHub Actions'),
+ { code: 'EUSAGE' }
+ )
+ }
+
+ // Ensure that the GHA OIDC token is available
+ if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL) {
+ throw Object.assign(
+ /* eslint-disable-next-line max-len */
+ new Error('Provenance generation in GitHub Actions requires "write" access to the "id-token" permission'),
+ { code: 'EUSAGE' }
+ )
+ }
+
+ // Some registries (e.g. GH packages) require auth to check visibility,
+ // and always return 404 when no auth is supplied. In this case we assume
+ // the package is always private and require `--access public` to publish
+ // with provenance.
+ let visibility = { public: false }
+ if (true && opts.access !== 'public') {
+ try {
+ const res = await npmFetch
+ .json(`${registry}/-/package/${spec.escapedName}/visibility`, opts)
+ visibility = res
+ } catch (err) {
+ if (err.code !== 'E404') {
+ throw err
+ }
+ }
+ }
+
+ if (!visibility.public && opts.provenance === true && opts.access !== 'public') {
+ throw Object.assign(
+ /* eslint-disable-next-line max-len */
+ new Error("Can't generate provenance for new or private package, you must set `access` to public."),
+ { code: 'EUSAGE' }
+ )
+ }
+}
+
module.exports = publish
diff --git a/deps/npm/node_modules/libnpmpublish/package.json b/deps/npm/node_modules/libnpmpublish/package.json
index a4adbe2a50f154..0e86861893e070 100644
--- a/deps/npm/node_modules/libnpmpublish/package.json
+++ b/deps/npm/node_modules/libnpmpublish/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmpublish",
- "version": "7.2.0",
+ "version": "7.3.0",
"description": "Programmatic API for the bits behind npm publish and unpublish",
"author": "GitHub Inc.",
"main": "lib/index.js",
@@ -24,6 +24,7 @@
},
"devDependencies": {
"@npmcli/eslint-config": "^4.0.0",
+ "@npmcli/mock-globals": "^1.0.0",
"@npmcli/mock-registry": "^1.0.0",
"@npmcli/template-oss": "4.14.1",
"lodash.clonedeep": "^4.5.0",
diff --git a/deps/npm/node_modules/npm-audit-report/lib/colors.js b/deps/npm/node_modules/npm-audit-report/lib/colors.js
index 2fbf5c36093ded..e6688f2f1c8c69 100644
--- a/deps/npm/node_modules/npm-audit-report/lib/colors.js
+++ b/deps/npm/node_modules/npm-audit-report/lib/colors.js
@@ -1,16 +1,14 @@
-const chalk = require('chalk')
-module.exports = color => {
- const identity = x => x
- const green = color ? s => chalk.green.bold(s) : identity
- const red = color ? s => chalk.red.bold(s) : identity
- const magenta = color ? s => chalk.magenta.bold(s) : identity
- const yellow = color ? s => chalk.yellow.bold(s) : identity
- const white = color ? s => chalk.bold(s) : identity
+module.exports = (chalk) => {
+ const green = s => chalk.green.bold(s)
+ const red = s => chalk.red.bold(s)
+ const magenta = s => chalk.magenta.bold(s)
+ const yellow = s => chalk.yellow.bold(s)
+ const white = s => chalk.bold(s)
const severity = (sev, s) => sev.toLowerCase() === 'moderate' ? yellow(s || sev)
: sev.toLowerCase() === 'high' ? red(s || sev)
: sev.toLowerCase() === 'critical' ? magenta(s || sev)
: white(s || sev)
- const dim = color ? s => chalk.dim(s) : identity
+ const dim = s => chalk.dim(s)
return {
dim,
diff --git a/deps/npm/node_modules/npm-audit-report/lib/index.js b/deps/npm/node_modules/npm-audit-report/lib/index.js
index 63063f92526a1b..d0ced01efefec9 100644
--- a/deps/npm/node_modules/npm-audit-report/lib/index.js
+++ b/deps/npm/node_modules/npm-audit-report/lib/index.js
@@ -12,7 +12,7 @@ const exitCode = require('./exit-code.js')
module.exports = Object.assign((data, options = {}) => {
const {
reporter = 'install',
- color = true,
+ chalk,
unicode = true,
indent = 2,
} = options
@@ -35,7 +35,7 @@ module.exports = Object.assign((data, options = {}) => {
}
return {
- report: reporters[reporter](data, { color, unicode, indent }),
+ report: reporters[reporter](data, { chalk, unicode, indent }),
exitCode: exitCode(data, auditLevel),
}
}, { reporters })
diff --git a/deps/npm/node_modules/npm-audit-report/lib/reporters/detail.js b/deps/npm/node_modules/npm-audit-report/lib/reporters/detail.js
index ba2f013836d9da..6dde8ec88de447 100644
--- a/deps/npm/node_modules/npm-audit-report/lib/reporters/detail.js
+++ b/deps/npm/node_modules/npm-audit-report/lib/reporters/detail.js
@@ -3,14 +3,14 @@
const colors = require('../colors.js')
const install = require('./install.js')
-module.exports = (data, { color }) => {
- const summary = install.summary(data, { color })
+module.exports = (data, { chalk }) => {
+ const summary = install.summary(data, { chalk })
const none = data.metadata.vulnerabilities.total === 0
- return none ? summary : fullReport(data, { color, summary })
+ return none ? summary : fullReport(data, { chalk, summary })
}
-const fullReport = (data, { color, summary }) => {
- const c = colors(color)
+const fullReport = (data, { chalk, summary }) => {
+ const c = colors(chalk)
const output = [c.white('# npm audit report'), '']
const printed = new Set()
diff --git a/deps/npm/node_modules/npm-audit-report/lib/reporters/install.js b/deps/npm/node_modules/npm-audit-report/lib/reporters/install.js
index cb8a249691e299..0a1e82533e657c 100644
--- a/deps/npm/node_modules/npm-audit-report/lib/reporters/install.js
+++ b/deps/npm/node_modules/npm-audit-report/lib/reporters/install.js
@@ -1,7 +1,7 @@
const colors = require('../colors.js')
-const calculate = (data, { color }) => {
- const c = colors(color)
+const calculate = (data, { chalk }) => {
+ const c = colors(chalk)
const output = []
const { metadata: { vulnerabilities } } = data
const vulnCount = vulnerabilities.total
diff --git a/deps/npm/node_modules/npm-audit-report/package.json b/deps/npm/node_modules/npm-audit-report/package.json
index 8779f4c1d2c7e2..492071c1faf902 100644
--- a/deps/npm/node_modules/npm-audit-report/package.json
+++ b/deps/npm/node_modules/npm-audit-report/package.json
@@ -1,6 +1,6 @@
{
"name": "npm-audit-report",
- "version": "4.0.0",
+ "version": "5.0.0",
"description": "Given a response from the npm security api, render it into a variety of security reports",
"main": "lib/index.js",
"scripts": {
@@ -28,13 +28,10 @@
],
"author": "GitHub Inc.",
"license": "ISC",
- "dependencies": {
- "chalk": "^4.0.0"
- },
"devDependencies": {
- "@npmcli/eslint-config": "^3.0.1",
- "@npmcli/template-oss": "4.5.1",
- "require-inject": "^1.4.4",
+ "@npmcli/eslint-config": "^4.0.0",
+ "@npmcli/template-oss": "4.14.1",
+ "chalk": "^5.2.0",
"tap": "^16.0.0"
},
"directories": {
@@ -58,6 +55,6 @@
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
- "version": "4.5.1"
+ "version": "4.14.1"
}
}
diff --git a/deps/npm/node_modules/supports-color/browser.js b/deps/npm/node_modules/supports-color/browser.js
deleted file mode 100644
index 62afa3a7425dc6..00000000000000
--- a/deps/npm/node_modules/supports-color/browser.js
+++ /dev/null
@@ -1,5 +0,0 @@
-'use strict';
-module.exports = {
- stdout: false,
- stderr: false
-};
diff --git a/deps/npm/node_modules/supports-color/index.js b/deps/npm/node_modules/supports-color/index.js
deleted file mode 100644
index 6fada390fb88d8..00000000000000
--- a/deps/npm/node_modules/supports-color/index.js
+++ /dev/null
@@ -1,135 +0,0 @@
-'use strict';
-const os = require('os');
-const tty = require('tty');
-const hasFlag = require('has-flag');
-
-const {env} = process;
-
-let forceColor;
-if (hasFlag('no-color') ||
- hasFlag('no-colors') ||
- hasFlag('color=false') ||
- hasFlag('color=never')) {
- forceColor = 0;
-} else if (hasFlag('color') ||
- hasFlag('colors') ||
- hasFlag('color=true') ||
- hasFlag('color=always')) {
- forceColor = 1;
-}
-
-if ('FORCE_COLOR' in env) {
- if (env.FORCE_COLOR === 'true') {
- forceColor = 1;
- } else if (env.FORCE_COLOR === 'false') {
- forceColor = 0;
- } else {
- forceColor = env.FORCE_COLOR.length === 0 ? 1 : Math.min(parseInt(env.FORCE_COLOR, 10), 3);
- }
-}
-
-function translateLevel(level) {
- if (level === 0) {
- return false;
- }
-
- return {
- level,
- hasBasic: true,
- has256: level >= 2,
- has16m: level >= 3
- };
-}
-
-function supportsColor(haveStream, streamIsTTY) {
- if (forceColor === 0) {
- return 0;
- }
-
- if (hasFlag('color=16m') ||
- hasFlag('color=full') ||
- hasFlag('color=truecolor')) {
- return 3;
- }
-
- if (hasFlag('color=256')) {
- return 2;
- }
-
- if (haveStream && !streamIsTTY && forceColor === undefined) {
- return 0;
- }
-
- const min = forceColor || 0;
-
- if (env.TERM === 'dumb') {
- return min;
- }
-
- if (process.platform === 'win32') {
- // Windows 10 build 10586 is the first Windows release that supports 256 colors.
- // Windows 10 build 14931 is the first release that supports 16m/TrueColor.
- const osRelease = os.release().split('.');
- if (
- Number(osRelease[0]) >= 10 &&
- Number(osRelease[2]) >= 10586
- ) {
- return Number(osRelease[2]) >= 14931 ? 3 : 2;
- }
-
- return 1;
- }
-
- if ('CI' in env) {
- if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'GITHUB_ACTIONS', 'BUILDKITE'].some(sign => sign in env) || env.CI_NAME === 'codeship') {
- return 1;
- }
-
- return min;
- }
-
- if ('TEAMCITY_VERSION' in env) {
- return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0;
- }
-
- if (env.COLORTERM === 'truecolor') {
- return 3;
- }
-
- if ('TERM_PROGRAM' in env) {
- const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10);
-
- switch (env.TERM_PROGRAM) {
- case 'iTerm.app':
- return version >= 3 ? 3 : 2;
- case 'Apple_Terminal':
- return 2;
- // No default
- }
- }
-
- if (/-256(color)?$/i.test(env.TERM)) {
- return 2;
- }
-
- if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) {
- return 1;
- }
-
- if ('COLORTERM' in env) {
- return 1;
- }
-
- return min;
-}
-
-function getSupportLevel(stream) {
- const level = supportsColor(stream, stream && stream.isTTY);
- return translateLevel(level);
-}
-
-module.exports = {
- supportsColor: getSupportLevel,
- stdout: translateLevel(supportsColor(true, tty.isatty(1))),
- stderr: translateLevel(supportsColor(true, tty.isatty(2)))
-};
diff --git a/deps/npm/node_modules/supports-color/package.json b/deps/npm/node_modules/supports-color/package.json
deleted file mode 100644
index f7182edcea2baa..00000000000000
--- a/deps/npm/node_modules/supports-color/package.json
+++ /dev/null
@@ -1,53 +0,0 @@
-{
- "name": "supports-color",
- "version": "7.2.0",
- "description": "Detect whether a terminal supports color",
- "license": "MIT",
- "repository": "chalk/supports-color",
- "author": {
- "name": "Sindre Sorhus",
- "email": "sindresorhus@gmail.com",
- "url": "sindresorhus.com"
- },
- "engines": {
- "node": ">=8"
- },
- "scripts": {
- "test": "xo && ava"
- },
- "files": [
- "index.js",
- "browser.js"
- ],
- "keywords": [
- "color",
- "colour",
- "colors",
- "terminal",
- "console",
- "cli",
- "ansi",
- "styles",
- "tty",
- "rgb",
- "256",
- "shell",
- "xterm",
- "command-line",
- "support",
- "supports",
- "capability",
- "detect",
- "truecolor",
- "16m"
- ],
- "dependencies": {
- "has-flag": "^4.0.0"
- },
- "devDependencies": {
- "ava": "^1.4.1",
- "import-fresh": "^3.0.0",
- "xo": "^0.24.0"
- },
- "browser": "browser.js"
-}
diff --git a/deps/npm/package.json b/deps/npm/package.json
index 148a2a3b9e56a4..f417d60fbab8c2 100644
--- a/deps/npm/package.json
+++ b/deps/npm/package.json
@@ -1,10 +1,11 @@
{
- "version": "9.6.7",
+ "version": "9.7.1",
"name": "npm",
"description": "a package manager for JavaScript",
"workspaces": [
"docs",
"smoke-tests",
+ "mock-globals",
"mock-registry",
"workspaces/*"
],
@@ -54,14 +55,14 @@
"dependencies": {
"@isaacs/string-locale-compare": "^1.1.0",
"@npmcli/arborist": "^6.2.9",
- "@npmcli/config": "^6.1.7",
+ "@npmcli/config": "^6.2.0",
"@npmcli/map-workspaces": "^3.0.4",
- "@npmcli/package-json": "^3.1.0",
+ "@npmcli/package-json": "^3.1.1",
"@npmcli/run-script": "^6.0.2",
"abbrev": "^2.0.0",
"archy": "~1.0.0",
"cacache": "^17.1.2",
- "chalk": "^4.1.2",
+ "chalk": "^5.2.0",
"ci-info": "^3.8.0",
"cli-columns": "^4.0.0",
"cli-table3": "^0.6.3",
@@ -77,12 +78,12 @@
"json-parse-even-better-errors": "^3.0.0",
"libnpmaccess": "^7.0.2",
"libnpmdiff": "^5.0.17",
- "libnpmexec": "^5.0.17",
+ "libnpmexec": "^6.0.0",
"libnpmfund": "^4.0.17",
"libnpmhook": "^9.0.3",
"libnpmorg": "^5.0.4",
"libnpmpack": "^5.0.17",
- "libnpmpublish": "^7.2.0",
+ "libnpmpublish": "^7.3.0",
"libnpmsearch": "^6.0.2",
"libnpmteam": "^5.0.3",
"libnpmversion": "^4.0.2",
@@ -93,7 +94,7 @@
"ms": "^2.1.2",
"node-gyp": "^9.3.1",
"nopt": "^7.1.0",
- "npm-audit-report": "^4.0.0",
+ "npm-audit-report": "^5.0.0",
"npm-install-checks": "^6.1.1",
"npm-package-arg": "^10.1.0",
"npm-pick-manifest": "^8.0.1",
@@ -107,9 +108,8 @@
"proc-log": "^3.0.0",
"qrcode-terminal": "^0.12.0",
"read": "^2.1.0",
- "read-package-json": "^6.0.3",
- "read-package-json-fast": "^3.0.2",
"semver": "^7.5.1",
+ "sigstore": "^1.5.0",
"ssri": "^10.0.4",
"tar": "^6.1.14",
"text-table": "~0.2.0",
@@ -175,9 +175,8 @@
"proc-log",
"qrcode-terminal",
"read",
- "read-package-json",
- "read-package-json-fast",
"semver",
+ "sigstore",
"ssri",
"tar",
"text-table",
@@ -191,10 +190,12 @@
"@npmcli/docs": "^1.0.0",
"@npmcli/eslint-config": "^4.0.0",
"@npmcli/fs": "^3.1.0",
- "@npmcli/git": "^4.0.4",
+ "@npmcli/git": "^4.1.0",
+ "@npmcli/mock-globals": "^1.0.0",
"@npmcli/mock-registry": "^1.0.0",
"@npmcli/promise-spawn": "^6.0.2",
"@npmcli/template-oss": "4.14.1",
+ "@tufjs/repo-mock": "^1.3.1",
"licensee": "^10.0.0",
"nock": "^13.3.0",
"npm-packlist": "^7.0.4",
@@ -238,13 +239,15 @@
"--exclude",
"smoke-tests/**",
"--exclude",
+ "mock-globals/**",
+ "--exclude",
"mock-registry/**",
"--exclude",
"workspaces/**",
"--exclude",
"tap-snapshots/**"
],
- "test-ignore": "^(docs|smoke-tests|mock-registry|workspaces)/"
+ "test-ignore": "^(docs|smoke-tests|mock-globals|mock-registry|workspaces)/"
},
"templateOSS": {
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
diff --git a/deps/npm/tap-snapshots/test/lib/commands/audit.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/audit.js.test.cjs
index 4fec8f86c5baa0..7611191688268c 100644
--- a/deps/npm/tap-snapshots/test/lib/commands/audit.js.test.cjs
+++ b/deps/npm/tap-snapshots/test/lib/commands/audit.js.test.cjs
@@ -175,6 +175,20 @@ audited 1 package in xxx
`
+exports[`test/lib/commands/audit.js TAP audit signatures third-party registry with sub-path (trailing slash) > must match snapshot 1`] = `
+audited 1 package in xxx
+
+1 package has a verified registry signature
+
+`
+
+exports[`test/lib/commands/audit.js TAP audit signatures third-party registry with sub-path > must match snapshot 1`] = `
+audited 1 package in xxx
+
+1 package has a verified registry signature
+
+`
+
exports[`test/lib/commands/audit.js TAP audit signatures with both invalid and missing signatures > must match snapshot 1`] = `
audited 2 packages in xxx
@@ -230,6 +244,13 @@ Someone might have tampered with this package since it was published on the regi
`
+exports[`test/lib/commands/audit.js TAP audit signatures with key fallback to legacy API > must match snapshot 1`] = `
+audited 1 package in xxx
+
+1 package has a verified registry signature
+
+`
+
exports[`test/lib/commands/audit.js TAP audit signatures with keys but missing signature > must match snapshot 1`] = `
audited 1 package in xxx
diff --git a/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs
index c88888b7cd7d15..93ac959ce44059 100644
--- a/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs
+++ b/deps/npm/tap-snapshots/test/lib/commands/config.js.test.cjs
@@ -111,12 +111,14 @@ exports[`test/lib/commands/config.js TAP config list --json > output matches sna
"package-lock-only": false,
"pack-destination": ".",
"parseable": false,
+ "prefer-dedupe": false,
"prefer-offline": false,
"prefer-online": false,
"preid": "",
"production": null,
"progress": true,
"provenance": false,
+ "provenance-file": null,
"proxy": null,
"read-only": false,
"rebuild-bundle": true,
@@ -265,6 +267,7 @@ package = []
package-lock = true
package-lock-only = false
parseable = false
+prefer-dedupe = false
prefer-offline = false
prefer-online = false
; prefix = "{REALGLOBALREFIX}" ; overridden by cli
@@ -272,6 +275,7 @@ preid = ""
production = null
progress = true
provenance = false
+provenance-file = null
proxy = null
read-only = false
rebuild-bundle = true
diff --git a/deps/npm/tap-snapshots/test/lib/commands/diff.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/diff.js.test.cjs
index 533b4f196e6616..e87086d7d9b8fb 100644
--- a/deps/npm/tap-snapshots/test/lib/commands/diff.js.test.cjs
+++ b/deps/npm/tap-snapshots/test/lib/commands/diff.js.test.cjs
@@ -33,7 +33,7 @@ index v0.1.0..v1.0.0 100644
+++ b/package.json
@@ -1,4 +1,4 @@
{
- "name": "foo",
+ "name": "@npmcli/foo",
- "version": "0.1.0"
+ "version": "1.0.0"
}
diff --git a/deps/npm/tap-snapshots/test/lib/commands/publish.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/publish.js.test.cjs
index 7cb836226a76b1..54dd0098597575 100644
--- a/deps/npm/tap-snapshots/test/lib/commands/publish.js.test.cjs
+++ b/deps/npm/tap-snapshots/test/lib/commands/publish.js.test.cjs
@@ -99,6 +99,148 @@ exports[`test/lib/commands/publish.js TAP json > new package json 1`] = `
}
`
+exports[`test/lib/commands/publish.js TAP manifest > manifest 1`] = `
+Object {
+ "_id": "npm@{VERSION}",
+ "author": Object {
+ "name": "GitHub Inc.",
+ },
+ "bin": Object {
+ "npm": "bin/npm-cli.js",
+ "npx": "bin/npx-cli.js",
+ },
+ "bugs": Object {
+ "url": "https://github.com/npm/cli/issues",
+ },
+ "description": "a package manager for JavaScript",
+ "directories": Object {
+ "bin": "./bin",
+ "doc": "./doc",
+ "lib": "./lib",
+ "man": "./man",
+ },
+ "exports": Object {
+ ".": Array [
+ Object {
+ "default": "./index.js",
+ },
+ "./index.js",
+ ],
+ "./package.json": "./package.json",
+ },
+ "files": Array [
+ "bin/",
+ "lib/",
+ "index.js",
+ "docs/content/",
+ "docs/output/",
+ "man/",
+ ],
+ "homepage": "https://docs.npmjs.com/",
+ "keywords": Array [
+ "install",
+ "modules",
+ "package manager",
+ "package.json",
+ ],
+ "license": "Artistic-2.0",
+ "main": "./index.js",
+ "man": Array [
+ "man/man1/npm-access.1",
+ "man/man1/npm-adduser.1",
+ "man/man1/npm-audit.1",
+ "man/man1/npm-bugs.1",
+ "man/man1/npm-cache.1",
+ "man/man1/npm-ci.1",
+ "man/man1/npm-completion.1",
+ "man/man1/npm-config.1",
+ "man/man1/npm-dedupe.1",
+ "man/man1/npm-deprecate.1",
+ "man/man1/npm-diff.1",
+ "man/man1/npm-dist-tag.1",
+ "man/man1/npm-docs.1",
+ "man/man1/npm-doctor.1",
+ "man/man1/npm-edit.1",
+ "man/man1/npm-exec.1",
+ "man/man1/npm-explain.1",
+ "man/man1/npm-explore.1",
+ "man/man1/npm-find-dupes.1",
+ "man/man1/npm-fund.1",
+ "man/man1/npm-help-search.1",
+ "man/man1/npm-help.1",
+ "man/man1/npm-hook.1",
+ "man/man1/npm-init.1",
+ "man/man1/npm-install-ci-test.1",
+ "man/man1/npm-install-test.1",
+ "man/man1/npm-install.1",
+ "man/man1/npm-link.1",
+ "man/man1/npm-login.1",
+ "man/man1/npm-logout.1",
+ "man/man1/npm-ls.1",
+ "man/man1/npm-org.1",
+ "man/man1/npm-outdated.1",
+ "man/man1/npm-owner.1",
+ "man/man1/npm-pack.1",
+ "man/man1/npm-ping.1",
+ "man/man1/npm-pkg.1",
+ "man/man1/npm-prefix.1",
+ "man/man1/npm-profile.1",
+ "man/man1/npm-prune.1",
+ "man/man1/npm-publish.1",
+ "man/man1/npm-query.1",
+ "man/man1/npm-rebuild.1",
+ "man/man1/npm-repo.1",
+ "man/man1/npm-restart.1",
+ "man/man1/npm-root.1",
+ "man/man1/npm-run-script.1",
+ "man/man1/npm-search.1",
+ "man/man1/npm-shrinkwrap.1",
+ "man/man1/npm-star.1",
+ "man/man1/npm-stars.1",
+ "man/man1/npm-start.1",
+ "man/man1/npm-stop.1",
+ "man/man1/npm-team.1",
+ "man/man1/npm-test.1",
+ "man/man1/npm-token.1",
+ "man/man1/npm-uninstall.1",
+ "man/man1/npm-unpublish.1",
+ "man/man1/npm-unstar.1",
+ "man/man1/npm-update.1",
+ "man/man1/npm-version.1",
+ "man/man1/npm-view.1",
+ "man/man1/npm-whoami.1",
+ "man/man1/npm.1",
+ "man/man1/npx.1",
+ "man/man5/folders.5",
+ "man/man5/install.5",
+ "man/man5/npm-global.5",
+ "man/man5/npm-json.5",
+ "man/man5/npm-shrinkwrap-json.5",
+ "man/man5/npmrc.5",
+ "man/man5/package-json.5",
+ "man/man5/package-lock-json.5",
+ "man/man7/config.7",
+ "man/man7/dependency-selectors.7",
+ "man/man7/developers.7",
+ "man/man7/logging.7",
+ "man/man7/orgs.7",
+ "man/man7/package-spec.7",
+ "man/man7/registry.7",
+ "man/man7/removal.7",
+ "man/man7/scope.7",
+ "man/man7/scripts.7",
+ "man/man7/workspaces.7",
+ ],
+ "name": "npm",
+ "readmeFilename": "README.md",
+ "repository": Object {
+ "type": "git",
+ "url": "git+https://github.com/npm/cli.git",
+ },
+ "version": "{VERSION}",
+}
+`
+
exports[`test/lib/commands/publish.js TAP no auth dry-run > must match snapshot 1`] = `
+ test-package@1.0.0
`
diff --git a/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs b/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs
index d7a943500e49e5..bd5aa05991ab2d 100644
--- a/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs
+++ b/deps/npm/tap-snapshots/test/lib/docs.js.test.cjs
@@ -24,7 +24,7 @@ All commands:
Specify configs in the ini-formatted file:
- /some/config/file/.npmrc
+ {USERCONFIG}
or on the command line via: npm --key=value
More configuration info: npm help config
@@ -165,6 +165,10 @@ Array [
]
`
+exports[`test/lib/docs.js TAP command list > deref 1`] = `
+Function deref(c)
+`
+
exports[`test/lib/docs.js TAP config > all definitions 1`] = `
#### \`_auth\`
@@ -179,6 +183,8 @@ Warning: This should generally not be set via a command-line option. It is
safer to use a registry-provided authentication bearer token stored in the
~/.npmrc file by running \`npm login\`.
+
+
#### \`access\`
* Default: 'public' for new packages, existing packages it will not change the
@@ -195,6 +201,8 @@ packages. Specifying a value of \`restricted\` or \`public\` during publish will
change the access for an existing package the same way that \`npm access set
status\` would.
+
+
#### \`all\`
* Default: false
@@ -204,6 +212,8 @@ When running \`npm outdated\` and \`npm ls\`, setting \`--all\` will show all
outdated or installed packages, rather than only those directly depended
upon by the current project.
+
+
#### \`allow-same-version\`
* Default: false
@@ -212,6 +222,8 @@ upon by the current project.
Prevents throwing an error when \`npm version\` is used to set the new version
to the same value as the current version.
+
+
#### \`audit\`
* Default: true
@@ -222,6 +234,8 @@ default registry and all registries configured for scopes. See the
documentation for [\`npm audit\`](/commands/npm-audit) for details on what is
submitted.
+
+
#### \`audit-level\`
* Default: null
@@ -230,6 +244,8 @@ submitted.
The minimum level of vulnerability for \`npm audit\` to exit with a non-zero
exit code.
+
+
#### \`auth-type\`
* Default: "web"
@@ -238,6 +254,8 @@ exit code.
What authentication strategy to use with \`login\`. Note that if an \`otp\`
config is given, this value will always be set to \`legacy\`.
+
+
#### \`before\`
* Default: null
@@ -253,6 +271,8 @@ If the requested version is a \`dist-tag\` and the given tag does not pass the
will be used. For example, \`foo@latest\` might install \`foo@1.2\` even though
\`latest\` is \`2.0\`.
+
+
#### \`bin-links\`
* Default: true
@@ -265,6 +285,8 @@ Set to false to have it not do this. This can be used to work around the
fact that some file systems don't support symlinks, even on ostensibly Unix
systems.
+
+
#### \`browser\`
* Default: OS X: \`"open"\`, Windows: \`"start"\`, Others: \`"xdg-open"\`
@@ -277,6 +299,8 @@ terminal.
Set to \`true\` to use default system URL opener.
+
+
#### \`ca\`
* Default: null
@@ -303,6 +327,8 @@ ca[]="..."
See also the \`strict-ssl\` config.
+
+
#### \`cache\`
* Default: Windows: \`%LocalAppData%\\npm-cache\`, Posix: \`~/.npm\`
@@ -310,6 +336,8 @@ See also the \`strict-ssl\` config.
The location of npm's cache directory.
+
+
#### \`cafile\`
* Default: null
@@ -319,6 +347,8 @@ A path to a file containing one or multiple Certificate Authority signing
certificates. Similar to the \`ca\` setting, but allows for multiple CA's, as
well as for the CA information to be stored in a file on disk.
+
+
#### \`call\`
* Default: ""
@@ -332,6 +362,7 @@ npm exec --package yo --package generator-node --call "yo node"
\`\`\`
+
#### \`cidr\`
* Default: null
@@ -340,6 +371,8 @@ npm exec --package yo --package generator-node --call "yo node"
This is a list of CIDR address to be used when configuring limited access
tokens with the \`npm token create\` command.
+
+
#### \`color\`
* Default: true unless the NO_COLOR environ is set to something other than '0'
@@ -348,6 +381,8 @@ tokens with the \`npm token create\` command.
If false, never shows colors. If \`"always"\` then always shows colors. If
true, then only prints color codes for tty file descriptors.
+
+
#### \`commit-hooks\`
* Default: true
@@ -355,6 +390,8 @@ true, then only prints color codes for tty file descriptors.
Run git commit hooks when using the \`npm version\` command.
+
+
#### \`depth\`
* Default: \`Infinity\` if \`--all\` is set, otherwise \`1\`
@@ -365,6 +402,8 @@ The depth to go when recursing packages for \`npm ls\`.
If not set, \`npm ls\` will show only the immediate dependencies of the root
project. If \`--all\` is set, then npm will show all dependencies by default.
+
+
#### \`description\`
* Default: true
@@ -372,6 +411,8 @@ project. If \`--all\` is set, then npm will show all dependencies by default.
Show the description in \`npm search\`
+
+
#### \`diff\`
* Default:
@@ -379,6 +420,8 @@ Show the description in \`npm search\`
Define arguments to compare in \`npm diff\`.
+
+
#### \`diff-dst-prefix\`
* Default: "b/"
@@ -386,6 +429,8 @@ Define arguments to compare in \`npm diff\`.
Destination prefix to be used in \`npm diff\` output.
+
+
#### \`diff-ignore-all-space\`
* Default: false
@@ -393,6 +438,8 @@ Destination prefix to be used in \`npm diff\` output.
Ignore whitespace when comparing lines in \`npm diff\`.
+
+
#### \`diff-name-only\`
* Default: false
@@ -400,6 +447,8 @@ Ignore whitespace when comparing lines in \`npm diff\`.
Prints only filenames when using \`npm diff\`.
+
+
#### \`diff-no-prefix\`
* Default: false
@@ -410,6 +459,8 @@ Do not show any source or destination prefix in \`npm diff\` output.
Note: this causes \`npm diff\` to ignore the \`--diff-src-prefix\` and
\`--diff-dst-prefix\` configs.
+
+
#### \`diff-src-prefix\`
* Default: "a/"
@@ -417,6 +468,8 @@ Note: this causes \`npm diff\` to ignore the \`--diff-src-prefix\` and
Source prefix to be used in \`npm diff\` output.
+
+
#### \`diff-text\`
* Default: false
@@ -424,6 +477,8 @@ Source prefix to be used in \`npm diff\` output.
Treat all files as text in \`npm diff\`.
+
+
#### \`diff-unified\`
* Default: 3
@@ -431,6 +486,8 @@ Treat all files as text in \`npm diff\`.
The number of lines of context to print in \`npm diff\`.
+
+
#### \`dry-run\`
* Default: false
@@ -444,6 +501,8 @@ commands that modify your local installation, eg, \`install\`, \`update\`,
Note: This is NOT honored by other network related commands, eg \`dist-tags\`,
\`owner\`, etc.
+
+
#### \`editor\`
* Default: The EDITOR or VISUAL environment variables, or
@@ -452,6 +511,8 @@ Note: This is NOT honored by other network related commands, eg \`dist-tags\`,
The command to run for \`npm edit\` and \`npm config edit\`.
+
+
#### \`engine-strict\`
* Default: false
@@ -463,6 +524,8 @@ Node.js version.
This can be overridden by setting the \`--force\` flag.
+
+
#### \`fetch-retries\`
* Default: 2
@@ -474,6 +537,8 @@ from the registry.
npm will retry idempotent read requests to the registry in the case of
network failures or 5xx HTTP errors.
+
+
#### \`fetch-retry-factor\`
* Default: 10
@@ -481,6 +546,8 @@ network failures or 5xx HTTP errors.
The "factor" config for the \`retry\` module to use when fetching packages.
+
+
#### \`fetch-retry-maxtimeout\`
* Default: 60000 (1 minute)
@@ -489,6 +556,8 @@ The "factor" config for the \`retry\` module to use when fetching packages.
The "maxTimeout" config for the \`retry\` module to use when fetching
packages.
+
+
#### \`fetch-retry-mintimeout\`
* Default: 10000 (10 seconds)
@@ -497,6 +566,8 @@ packages.
The "minTimeout" config for the \`retry\` module to use when fetching
packages.
+
+
#### \`fetch-timeout\`
* Default: 300000 (5 minutes)
@@ -504,6 +575,8 @@ packages.
The maximum amount of time to wait for HTTP requests to complete.
+
+
#### \`force\`
* Default: false
@@ -530,6 +603,8 @@ mistakes, unnecessary performance degradation, and malicious input.
If you don't have a clear idea of what you want to do, it is strongly
recommended that you do not use this option!
+
+
#### \`foreground-scripts\`
* Default: false
@@ -542,6 +617,8 @@ input, output, and error with the main npm process.
Note that this will generally make installs run slower, and be much noisier,
but can be useful for debugging.
+
+
#### \`format-package-lock\`
* Default: true
@@ -550,6 +627,8 @@ but can be useful for debugging.
Format \`package-lock.json\` or \`npm-shrinkwrap.json\` as a human readable
file.
+
+
#### \`fund\`
* Default: true
@@ -559,6 +638,8 @@ When "true" displays the message at the end of each \`npm install\`
acknowledging the number of dependencies looking for funding. See [\`npm
fund\`](/commands/npm-fund) for details.
+
+
#### \`git\`
* Default: "git"
@@ -567,6 +648,8 @@ fund\`](/commands/npm-fund) for details.
The command to use for git commands. If git is installed on the computer,
but is not in the \`PATH\`, then set this to the full path to the git binary.
+
+
#### \`git-tag-version\`
* Default: true
@@ -575,6 +658,8 @@ but is not in the \`PATH\`, then set this to the full path to the git binary.
Tag the commit when using the \`npm version\` command. Setting this to false
results in no commit being made at all.
+
+
#### \`global\`
* Default: false
@@ -589,6 +674,8 @@ folder instead of the current working directory. See
* bin files are linked to \`{prefix}/bin\`
* man pages are linked to \`{prefix}/share/man\`
+
+
#### \`globalconfig\`
* Default: The global --prefix setting plus 'etc/npmrc'. For example,
@@ -597,6 +684,8 @@ folder instead of the current working directory. See
The config file to read for global config options.
+
+
#### \`heading\`
* Default: "npm"
@@ -604,6 +693,8 @@ The config file to read for global config options.
The string that starts all the debugging log output.
+
+
#### \`https-proxy\`
* Default: null
@@ -614,6 +705,8 @@ A proxy to use for outgoing https requests. If the \`HTTPS_PROXY\` or
proxy settings will be honored by the underlying \`make-fetch-happen\`
library.
+
+
#### \`if-present\`
* Default: false
@@ -640,6 +733,8 @@ Note that commands explicitly intended to run a particular script, such as
will still run their intended script if \`ignore-scripts\` is set, but they
will *not* run any pre- or post-scripts.
+
+
#### \`include\`
* Default:
@@ -652,6 +747,8 @@ This is the inverse of \`--omit=\`.
Dependency types specified in \`--include\` will not be omitted, regardless of
the order in which omit/include are specified on the command-line.
+
+
#### \`include-staged\`
* Default: false
@@ -662,6 +759,8 @@ Allow installing "staged" published packages, as defined by [npm RFC PR
This is experimental, and not implemented by the npm public registry.
+
+
#### \`include-workspace-root\`
* Default: false
@@ -682,6 +781,8 @@ This value is not exported to the environment for child processes.
The value \`npm init\` should use by default for the package author's email.
+
+
#### \`init-author-name\`
* Default: ""
@@ -689,6 +790,8 @@ The value \`npm init\` should use by default for the package author's email.
The value \`npm init\` should use by default for the package author's name.
+
+
#### \`init-author-url\`
* Default: ""
@@ -697,6 +800,8 @@ The value \`npm init\` should use by default for the package author's name.
The value \`npm init\` should use by default for the package author's
homepage.
+
+
#### \`init-license\`
* Default: "ISC"
@@ -704,6 +809,8 @@ homepage.
The value \`npm init\` should use by default for the package license.
+
+
#### \`init-module\`
* Default: "~/.npm-init.js"
@@ -714,6 +821,8 @@ documentation for the
[init-package-json](https://github.com/npm/init-package-json) module for
more information, or [npm init](/commands/npm-init).
+
+
#### \`init-version\`
* Default: "1.0.0"
@@ -722,6 +831,8 @@ more information, or [npm init](/commands/npm-init).
The value that \`npm init\` should use by default for the package version
number, if not already set in package.json.
+
+
#### \`install-links\`
* Default: false
@@ -731,6 +842,8 @@ When set file: protocol dependencies will be packed and installed as regular
dependencies instead of creating a symlink. This option has no effect on
workspaces.
+
+
#### \`install-strategy\`
* Default: "hoisted"
@@ -743,6 +856,8 @@ place, no hoisting. shallow (formerly --global-style) only install direct
deps at top-level. linked: (experimental) install in node_modules/.store,
link in place, unhoisted.
+
+
#### \`json\`
* Default: false
@@ -755,6 +870,8 @@ Whether or not to output JSON data, rather than the normal output.
Not supported by all npm commands.
+
+
#### \`legacy-peer-deps\`
* Default: false
@@ -773,6 +890,8 @@ This differs from \`--omit=peer\`, in that \`--omit=peer\` will avoid unpacking
Use of \`legacy-peer-deps\` is not recommended, as it will not enforce the
\`peerDependencies\` contract that meta-dependencies may rely on.
+
+
#### \`link\`
* Default: false
@@ -780,6 +899,8 @@ Use of \`legacy-peer-deps\` is not recommended, as it will not enforce the
Used with \`npm ls\`, limiting output to only those packages that are linked.
+
+
#### \`local-address\`
* Default: null
@@ -788,6 +909,8 @@ Used with \`npm ls\`, limiting output to only those packages that are linked.
The IP address of the local interface to use when making connections to the
npm registry. Must be IPv4 in versions of Node prior to 0.12.
+
+
#### \`location\`
* Default: "user" unless \`--global\` is passed, which will also set this value
@@ -805,6 +928,8 @@ instead of the current working directory. See
* bin files are linked to \`{prefix}/bin\`
* man pages are linked to \`{prefix}/share/man\`
+
+
#### \`lockfile-version\`
* Default: Version 3 if no lockfile, auto-converting v1 lockfiles to v3,
@@ -827,6 +952,8 @@ determinism and interoperability, at the expense of more bytes on disk.
disk than lockfile version 2, but not interoperable with older npm versions.
Ideal if all users are on npm version 7 and higher.
+
+
#### \`loglevel\`
* Default: "notice"
@@ -841,6 +968,8 @@ Any logs of a higher level than the setting are shown. The default is
See also the \`foreground-scripts\` config.
+
+
#### \`logs-dir\`
* Default: A directory named \`_logs\` inside the cache
@@ -849,6 +978,8 @@ See also the \`foreground-scripts\` config.
The location of npm's log directory. See [\`npm logging\`](/using-npm/logging)
for more information.
+
+
#### \`logs-max\`
* Default: 10
@@ -858,6 +989,8 @@ The maximum number of log files to store.
If set to 0, no log files will be written for the current run.
+
+
#### \`long\`
* Default: false
@@ -865,6 +998,8 @@ If set to 0, no log files will be written for the current run.
Show extended information in \`ls\`, \`search\`, and \`help-search\`.
+
+
#### \`maxsockets\`
* Default: 15
@@ -873,6 +1008,8 @@ Show extended information in \`ls\`, \`search\`, and \`help-search\`.
The maximum number of connections to use per origin (protocol/host/port
combination).
+
+
#### \`message\`
* Default: "%s"
@@ -882,6 +1019,8 @@ Commit message which is used by \`npm version\` when creating version commit.
Any "%s" in the message will be replaced with the version number.
+
+
#### \`node-options\`
* Default: null
@@ -891,6 +1030,8 @@ Options to pass through to Node.js via the \`NODE_OPTIONS\` environment
variable. This does not impact how npm itself is executed but it does impact
how lifecycle scripts are called.
+
+
#### \`noproxy\`
* Default: The value of the NO_PROXY environment variable
@@ -900,6 +1041,8 @@ Domain extensions that should bypass any proxies.
Also accepts a comma-delimited string.
+
+
#### \`offline\`
* Default: false
@@ -908,6 +1051,8 @@ Also accepts a comma-delimited string.
Force offline mode: no network requests will be done during install. To
allow the CLI to fill in missing cache data, see \`--prefer-offline\`.
+
+
#### \`omit\`
* Default: 'dev' if the \`NODE_ENV\` environment variable is set to
@@ -926,6 +1071,8 @@ it will be included.
If the resulting omit list includes \`'dev'\`, then the \`NODE_ENV\` environment
variable will be set to \`'production'\` for all lifecycle scripts.
+
+
#### \`omit-lockfile-registry-resolved\`
* Default: false
@@ -936,6 +1083,8 @@ registry dependencies. Subsequent installs will need to resolve tarball
endpoints with the configured registry, likely resulting in a longer install
time.
+
+
#### \`otp\`
* Default: null
@@ -947,6 +1096,8 @@ when publishing or changing package permissions with \`npm access\`.
If not set, and a registry response fails with a challenge for a one-time
password, npm will prompt on the command line for one.
+
+
#### \`pack-destination\`
* Default: "."
@@ -954,6 +1105,8 @@ password, npm will prompt on the command line for one.
Directory in which \`npm pack\` will save tarballs.
+
+
#### \`package\`
* Default:
@@ -961,6 +1114,8 @@ Directory in which \`npm pack\` will save tarballs.
The package or packages to install for [\`npm exec\`](/commands/npm-exec)
+
+
#### \`package-lock\`
* Default: true
@@ -969,7 +1124,7 @@ The package or packages to install for [\`npm exec\`](/commands/npm-exec)
If set to false, then ignore \`package-lock.json\` files when installing. This
will also prevent _writing_ \`package-lock.json\` if \`save\` is true.
-This configuration does not affect \`npm ci\`.
+
#### \`package-lock-only\`
@@ -985,6 +1140,8 @@ instead of checking \`node_modules\` and downloading dependencies.
For \`list\` this means the output will be based on the tree described by the
\`package-lock.json\`, rather than the contents of \`node_modules\`.
+
+
#### \`parseable\`
* Default: false
@@ -993,6 +1150,18 @@ For \`list\` this means the output will be based on the tree described by the
Output parseable results from commands that write to standard output. For
\`npm search\`, this will be tab-separated table format.
+
+
+#### \`prefer-dedupe\`
+
+* Default: false
+* Type: Boolean
+
+Prefer to deduplicate packages if possible, rather than choosing a newer
+version of a dependency.
+
+
+
#### \`prefer-offline\`
* Default: false
@@ -1002,6 +1171,8 @@ If true, staleness checks for cached data will be bypassed, but missing data
will be requested from the server. To force full offline mode, use
\`--offline\`.
+
+
#### \`prefer-online\`
* Default: false
@@ -1010,6 +1181,8 @@ will be requested from the server. To force full offline mode, use
If true, staleness checks for cached data will be forced, making the CLI
look for updates immediately even for fresh package data.
+
+
#### \`prefix\`
* Default: In global mode, the folder where the node executable is installed.
@@ -1020,6 +1193,8 @@ look for updates immediately even for fresh package data.
The location to install global items. If set on the command line, then it
forces non-global commands to run in the specified folder.
+
+
#### \`preid\`
* Default: ""
@@ -1028,6 +1203,8 @@ forces non-global commands to run in the specified folder.
The "prerelease identifier" to use as a prefix for the "prerelease" part of
a semver. Like the \`rc\` in \`1.2.0-rc.8\`.
+
+
#### \`progress\`
* Default: \`true\` unless running in a known CI system
@@ -1038,6 +1215,8 @@ operations, if \`process.stderr\` is a TTY.
Set to \`false\` to suppress the progress bar.
+
+
#### \`provenance\`
* Default: false
@@ -1046,6 +1225,17 @@ Set to \`false\` to suppress the progress bar.
When publishing from a supported cloud CI/CD system, the package will be
publicly linked to where it was built and published from.
+This config can not be used with: \`provenance-file\`
+
+#### \`provenance-file\`
+
+* Default: null
+* Type: Path
+
+When publishing, the provenance bundle at the given path will be used.
+
+This config can not be used with: \`provenance\`
+
#### \`proxy\`
* Default: null
@@ -1055,6 +1245,8 @@ A proxy to use for outgoing http requests. If the \`HTTP_PROXY\` or
\`http_proxy\` environment variables are set, proxy settings will be honored
by the underlying \`request\` library.
+
+
#### \`read-only\`
* Default: false
@@ -1063,6 +1255,8 @@ by the underlying \`request\` library.
This is used to mark a token as unable to publish when configuring limited
access tokens with the \`npm token create\` command.
+
+
#### \`rebuild-bundle\`
* Default: true
@@ -1070,6 +1264,8 @@ access tokens with the \`npm token create\` command.
Rebuild bundled dependencies after installation.
+
+
#### \`registry\`
* Default: "https://registry.npmjs.org/"
@@ -1077,6 +1273,8 @@ Rebuild bundled dependencies after installation.
The base URL of the npm registry.
+
+
#### \`replace-registry-host\`
* Default: "npmjs"
@@ -1092,6 +1290,8 @@ registry host with the configured host every time.
You may also specify a bare hostname (e.g., "registry.npmjs.org").
+
+
#### \`save\`
* Default: \`true\` unless when using \`npm update\` where it defaults to \`false\`
@@ -1104,6 +1304,8 @@ When used with the \`npm rm\` command, removes the dependency from
Will also prevent writing to \`package-lock.json\` if set to \`false\`.
+
+
#### \`save-bundle\`
* Default: false
@@ -1115,6 +1317,8 @@ If a package would be saved at install time by the use of \`--save\`,
Ignored if \`--save-peer\` is set, since peerDependencies cannot be bundled.
+
+
#### \`save-dev\`
* Default: false
@@ -1122,6 +1326,8 @@ Ignored if \`--save-peer\` is set, since peerDependencies cannot be bundled.
Save installed packages to a package.json file as \`devDependencies\`.
+
+
#### \`save-exact\`
* Default: false
@@ -1130,6 +1336,8 @@ Save installed packages to a package.json file as \`devDependencies\`.
Dependencies saved to package.json will be configured with an exact version
rather than using npm's default semver range operator.
+
+
#### \`save-optional\`
* Default: false
@@ -1137,6 +1345,8 @@ rather than using npm's default semver range operator.
Save installed packages to a package.json file as \`optionalDependencies\`.
+
+
#### \`save-peer\`
* Default: false
@@ -1144,6 +1354,8 @@ Save installed packages to a package.json file as \`optionalDependencies\`.
Save installed packages to a package.json file as \`peerDependencies\`
+
+
#### \`save-prefix\`
* Default: "^"
@@ -1157,6 +1369,8 @@ to \`^1.2.3\` which allows minor upgrades for that package, but after \`npm
config set save-prefix='~'\` it would be set to \`~1.2.3\` which only allows
patch upgrades.
+
+
#### \`save-prod\`
* Default: false
@@ -1169,6 +1383,8 @@ you want to move it to be a non-optional production dependency.
This is the default behavior if \`--save\` is true, and neither \`--save-dev\`
or \`--save-optional\` are true.
+
+
#### \`scope\`
* Default: the scope of the current project, if any, or ""
@@ -1199,6 +1415,7 @@ npm init --scope=@foo --yes
\`\`\`
+
#### \`script-shell\`
* Default: '/bin/sh' on POSIX systems, 'cmd.exe' on Windows
@@ -1207,6 +1424,8 @@ npm init --scope=@foo --yes
The shell to use for scripts run with the \`npm exec\`, \`npm run\` and \`npm
init \` commands.
+
+
#### \`searchexclude\`
* Default: ""
@@ -1214,6 +1433,8 @@ init \` commands.
Space-separated options that limit the results from search.
+
+
#### \`searchlimit\`
* Default: 20
@@ -1222,6 +1443,8 @@ Space-separated options that limit the results from search.
Number of items to limit search results to. Will not apply at all to legacy
searches.
+
+
#### \`searchopts\`
* Default: ""
@@ -1229,6 +1452,8 @@ searches.
Space-separated options that are always passed to search.
+
+
#### \`searchstaleness\`
* Default: 900
@@ -1237,6 +1462,8 @@ Space-separated options that are always passed to search.
The age of the cache, in seconds, before another registry request is made if
using legacy search endpoint.
+
+
#### \`shell\`
* Default: SHELL environment variable, or "bash" on Posix, or "cmd.exe" on
@@ -1245,6 +1472,8 @@ using legacy search endpoint.
The shell to run for the \`npm explore\` command.
+
+
#### \`sign-git-commit\`
* Default: false
@@ -1256,6 +1485,8 @@ version using \`-S\` to add a signature.
Note that git requires you to have set up GPG keys in your git configs for
this to work properly.
+
+
#### \`sign-git-tag\`
* Default: false
@@ -1267,6 +1498,8 @@ If set to true, then the \`npm version\` command will tag the version using
Note that git requires you to have set up GPG keys in your git configs for
this to work properly.
+
+
#### \`strict-peer-deps\`
* Default: false
@@ -1286,6 +1519,8 @@ When such an override is performed, a warning is printed, explaining the
conflict and the packages involved. If \`--strict-peer-deps\` is set, then
this warning is treated as a failure.
+
+
#### \`strict-ssl\`
* Default: true
@@ -1296,6 +1531,8 @@ via https.
See also the \`ca\` config.
+
+
#### \`tag\`
* Default: "latest"
@@ -1310,6 +1547,8 @@ command, if no explicit tag is given.
When used by the \`npm diff\` command, this is the tag used to fetch the
tarball that will be compared with the local files by default.
+
+
#### \`tag-version-prefix\`
* Default: "v"
@@ -1323,6 +1562,8 @@ Because other tools may rely on the convention that npm version tags look
like \`v1.0.0\`, _only use this property if it is absolutely necessary_. In
particular, use care when overriding this setting for public packages.
+
+
#### \`timing\`
* Default: false
@@ -1337,6 +1578,8 @@ You can quickly view it with this [json](https://npm.im/json) command line:
Timing information will also be reported in the terminal. To suppress this
while still writing the timing file, use \`--silent\`.
+
+
#### \`umask\`
* Default: 0
@@ -1357,6 +1600,8 @@ Thus, the effective default umask value on most POSIX systems is 0o22,
meaning that folders and executables are created with a mode of 0o755 and
other files are created with a mode of 0o644.
+
+
#### \`unicode\`
* Default: false on windows, true on mac/unix systems with a unicode locale,
@@ -1366,6 +1611,8 @@ other files are created with a mode of 0o644.
When set to true, npm uses unicode characters in the tree output. When
false, it uses ascii characters instead of unicode glyphs.
+
+
#### \`update-notifier\`
* Default: true
@@ -1374,6 +1621,8 @@ false, it uses ascii characters instead of unicode glyphs.
Set to false to suppress the update notification when using an older version
of npm than the latest.
+
+
#### \`usage\`
* Default: false
@@ -1381,6 +1630,8 @@ of npm than the latest.
Show short usage output about the command specified.
+
+
#### \`user-agent\`
* Default: "npm/{npm-version} node/{node-version} {platform} {arch}
@@ -1399,6 +1650,8 @@ their actual counterparts:
* \`{ci}\` - The value of the \`ci-name\` config, if set, prefixed with \`ci/\`, or
an empty string if \`ci-name\` is empty.
+
+
#### \`userconfig\`
* Default: "~/.npmrc"
@@ -1410,6 +1663,8 @@ This may be overridden by the \`npm_config_userconfig\` environment variable
or the \`--userconfig\` command line option, but may _not_ be overridden by
settings in the \`globalconfig\` file.
+
+
#### \`version\`
* Default: false
@@ -1419,6 +1674,8 @@ If true, output the npm version and exit successfully.
Only relevant when specified explicitly on the command line.
+
+
#### \`versions\`
* Default: false
@@ -1430,6 +1687,8 @@ exists, and exit successfully.
Only relevant when specified explicitly on the command line.
+
+
#### \`viewer\`
* Default: "man" on Posix, "browser" on Windows
@@ -1439,6 +1698,8 @@ The program to use to view help content.
Set to \`"browser"\` to view html help content in the default web browser.
+
+
#### \`which\`
* Default: null
@@ -1446,6 +1707,8 @@ Set to \`"browser"\` to view html help content in the default web browser.
If there are multiple funding sources, which 1-indexed source URL to open.
+
+
#### \`workspace\`
* Default:
@@ -1494,6 +1757,8 @@ This value is not exported to the environment for child processes.
If set to true, the npm cli will run an update after operations that may
possibly change the workspaces installed to the \`node_modules\` folder.
+
+
#### \`yes\`
* Default: null
@@ -1502,6 +1767,8 @@ possibly change the workspaces installed to the \`node_modules\` folder.
Automatically answer "yes" to any prompts that npm might print on the
command line.
+
+
#### \`also\`
* Default: null
@@ -1510,6 +1777,8 @@ command line.
When set to \`dev\` or \`development\`, this is an alias for \`--include=dev\`.
+
+
#### \`cache-max\`
* Default: Infinity
@@ -1518,6 +1787,8 @@ When set to \`dev\` or \`development\`, this is an alias for \`--include=dev\`.
\`--cache-max=0\` is an alias for \`--prefer-online\`
+
+
#### \`cache-min\`
* Default: 0
@@ -1526,6 +1797,8 @@ When set to \`dev\` or \`development\`, this is an alias for \`--include=dev\`.
\`--cache-min=9999 (or bigger)\` is an alias for \`--prefer-offline\`.
+
+
#### \`cert\`
* Default: null
@@ -1547,6 +1820,8 @@ It is _not_ the path to a certificate file, though you can set a
registry-scoped "certfile" path like
"//other-registry.tld/:certfile=/path/to/cert.pem".
+
+
#### \`ci-name\`
* Default: The name of the current CI system, or \`null\` when not on a known CI
@@ -1559,6 +1834,8 @@ The name of a continuous integration system. If not set explicitly, npm will
detect the current CI environment using the
[\`ci-info\`](http://npm.im/ci-info) module.
+
+
#### \`dev\`
* Default: false
@@ -1567,6 +1844,8 @@ detect the current CI environment using the
Alias for \`--include=dev\`.
+
+
#### \`global-style\`
* Default: false
@@ -1577,6 +1856,8 @@ Alias for \`--include=dev\`.
Only install direct dependencies in the top level \`node_modules\`, but hoist
on deeper dependencies. Sets \`--install-strategy=shallow\`.
+
+
#### \`init.author.email\`
* Default: ""
@@ -1585,6 +1866,8 @@ on deeper dependencies. Sets \`--install-strategy=shallow\`.
Alias for \`--init-author-email\`
+
+
#### \`init.author.name\`
* Default: ""
@@ -1593,6 +1876,8 @@ Alias for \`--init-author-email\`
Alias for \`--init-author-name\`
+
+
#### \`init.author.url\`
* Default: ""
@@ -1601,6 +1886,8 @@ Alias for \`--init-author-name\`
Alias for \`--init-author-url\`
+
+
#### \`init.license\`
* Default: "ISC"
@@ -1609,6 +1896,8 @@ Alias for \`--init-author-url\`
Alias for \`--init-license\`
+
+
#### \`init.module\`
* Default: "~/.npm-init.js"
@@ -1617,6 +1906,8 @@ Alias for \`--init-license\`
Alias for \`--init-module\`
+
+
#### \`init.version\`
* Default: "1.0.0"
@@ -1625,6 +1916,8 @@ Alias for \`--init-module\`
Alias for \`--init-version\`
+
+
#### \`key\`
* Default: null
@@ -1644,6 +1937,8 @@ key="-----BEGIN PRIVATE KEY-----\\nXXXX\\nXXXX\\n-----END PRIVATE KEY-----"
It is _not_ the path to a key file, though you can set a registry-scoped
"keyfile" path like "//other-registry.tld/:keyfile=/path/to/key.pem".
+
+
#### \`legacy-bundling\`
* Default: false
@@ -1656,6 +1951,8 @@ the same manner that they are depended on. This may cause very deep
directory structures and duplicate package installs as there is no
de-duplicating. Sets \`--install-strategy=nested\`.
+
+
#### \`only\`
* Default: null
@@ -1664,6 +1961,8 @@ de-duplicating. Sets \`--install-strategy=nested\`.
When set to \`prod\` or \`production\`, this is an alias for \`--omit=dev\`.
+
+
#### \`optional\`
* Default: null
@@ -1675,6 +1974,8 @@ Default value does install optional deps unless otherwise omitted.
Alias for --include=optional or --omit=optional
+
+
#### \`production\`
* Default: null
@@ -1683,6 +1984,8 @@ Alias for --include=optional or --omit=optional
Alias for \`--omit=dev\`
+
+
#### \`shrinkwrap\`
* Default: true
@@ -1691,6 +1994,8 @@ Alias for \`--omit=dev\`
Alias for --package-lock
+
+
#### \`tmp\`
* Default: The value returned by the Node.js \`os.tmpdir()\` method
@@ -1702,6 +2007,8 @@ Alias for --package-lock
Historically, the location where temporary files were stored. No longer
relevant.
+
+
`
exports[`test/lib/docs.js TAP config > all keys 1`] = `
@@ -1804,6 +2111,7 @@ Array [
"package-lock-only",
"pack-destination",
"parseable",
+ "prefer-dedupe",
"prefer-offline",
"prefer-online",
"prefix",
@@ -1811,6 +2119,7 @@ Array [
"production",
"progress",
"provenance",
+ "provenance-file",
"proxy",
"read-only",
"rebuild-bundle",
@@ -1940,12 +2249,14 @@ Array [
"package-lock-only",
"pack-destination",
"parseable",
+ "prefer-dedupe",
"prefer-offline",
"prefer-online",
"preid",
"production",
"progress",
"provenance",
+ "provenance-file",
"proxy",
"read-only",
"rebuild-bundle",
@@ -2015,6 +2326,129 @@ Array [
]
`
+exports[`test/lib/docs.js TAP flat options > full flat options object 1`] = `
+Object {
+ "_auth": null,
+ "access": null,
+ "all": false,
+ "allowSameVersion": false,
+ "audit": true,
+ "auditLevel": null,
+ "authType": "web",
+ "before": null,
+ "binLinks": true,
+ "browser": null,
+ "ca": null,
+ "cache": "{CWD}/cache/_cacache",
+ "call": "",
+ "cert": null,
+ "cidr": null,
+ "ciName": "{ci}",
+ "color": false,
+ "commitHooks": true,
+ "defaultTag": "latest",
+ "depth": null,
+ "diff": Array [],
+ "diffDstPrefix": "b/",
+ "diffIgnoreAllSpace": false,
+ "diffNameOnly": false,
+ "diffNoPrefix": false,
+ "diffSrcPrefix": "a/",
+ "diffText": false,
+ "diffUnified": 3,
+ "dryRun": false,
+ "editor": "{EDITOR}",
+ "engineStrict": false,
+ "force": false,
+ "foregroundScripts": false,
+ "formatPackageLock": true,
+ "fund": true,
+ "git": "git",
+ "gitTagVersion": true,
+ "global": false,
+ "globalconfig": "{CWD}/global/etc/npmrc",
+ "hashAlgorithm": "sha1",
+ "heading": "npm",
+ "httpsProxy": null,
+ "ifPresent": false,
+ "ignoreScripts": false,
+ "includeStaged": false,
+ "includeWorkspaceRoot": false,
+ "installLinks": false,
+ "installStrategy": "hoisted",
+ "json": false,
+ "key": null,
+ "legacyPeerDeps": false,
+ "localAddress": null,
+ "location": "user",
+ "lockfileVersion": null,
+ "logColor": false,
+ "maxSockets": 15,
+ "message": "%s",
+ "nodeBin": "{NODE}",
+ "nodeVersion": "2.2.2",
+ "noProxy": "",
+ "npmBin": "{CWD}/{TESTDIR}/docs.js",
+ "npmCommand": "version",
+ "npmVersion": "1.1.1",
+ "npxCache": "{CWD}/cache/_npx",
+ "offline": false,
+ "omit": Array [],
+ "omitLockfileRegistryResolved": false,
+ "otp": null,
+ "package": Array [],
+ "packageLock": true,
+ "packageLockOnly": false,
+ "packDestination": ".",
+ "parseable": false,
+ "preferDedupe": false,
+ "preferOffline": false,
+ "preferOnline": false,
+ "preid": "",
+ "progress": false,
+ "projectScope": "",
+ "provenance": false,
+ "provenanceFile": null,
+ "proxy": null,
+ "readOnly": false,
+ "rebuildBundle": true,
+ "registry": "https://registry.npmjs.org/",
+ "replaceRegistryHost": "npmjs",
+ "retry": Object {
+ "factor": 10,
+ "maxTimeout": 60000,
+ "minTimeout": 10000,
+ "retries": 0,
+ },
+ "save": true,
+ "saveBundle": false,
+ "savePrefix": "^",
+ "scope": "",
+ "scriptShell": undefined,
+ "search": Object {
+ "description": true,
+ "exclude": "",
+ "limit": 20,
+ "opts": Null Object {},
+ "staleness": 900,
+ },
+ "shell": "{SHELL}",
+ "signGitCommit": false,
+ "signGitTag": false,
+ "silent": false,
+ "strictPeerDeps": false,
+ "strictSSL": true,
+ "tagVersionPrefix": "v",
+ "timeout": 300000,
+ "tufCache": "{CWD}/cache/_tuf",
+ "umask": 0,
+ "unicode": false,
+ "userAgent": "npm/1.1.1 node/2.2.2 {PLATFORM} {ARCH} workspaces/false ci/{ci}",
+ "workspacesEnabled": true,
+ "workspacesUpdate": true,
+}
+`
+
exports[`test/lib/docs.js TAP shorthands > docs 1`] = `
* \`-a\`: \`--all\`
* \`--enjoy-by\`: \`--before\`
@@ -2215,8 +2649,8 @@ npm ci
Options:
[--install-strategy ] [--legacy-bundling]
[--global-style] [--omit [--omit ...]]
-[--strict-peer-deps] [--no-package-lock] [--foreground-scripts]
-[--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run]
+[--strict-peer-deps] [--foreground-scripts] [--ignore-scripts] [--no-audit]
+[--no-bin-links] [--no-fund] [--dry-run]
[-w|--workspace [-w|--workspace ...]]
[-ws|--workspaces] [--include-workspace-root] [--install-links]
@@ -2235,7 +2669,6 @@ aliases: clean-install, ic, install-clean, isntall-clean
#### \`global-style\`
#### \`omit\`
#### \`strict-peer-deps\`
-#### \`package-lock\`
#### \`foreground-scripts\`
#### \`ignore-scripts\`
#### \`audit\`
@@ -2641,6 +3074,9 @@ Get a value from the npm configuration
Usage:
npm get [ ...] (See \`npm config\`)
+Options:
+[-l|--long]
+
Run "npm help get" for more info
\`\`\`bash
@@ -2649,7 +3085,7 @@ npm get [ ...] (See \`npm config\`)
Note: This command is unaware of workspaces.
-NO PARAMS
+#### \`long\`
`
exports[`test/lib/docs.js TAP usage help > must match snapshot 1`] = `
@@ -2766,7 +3202,7 @@ Options:
[-E|--save-exact] [-g|--global]
[--install-strategy ] [--legacy-bundling]
[--global-style] [--omit [--omit ...]]
-[--strict-peer-deps] [--no-package-lock] [--foreground-scripts]
+[--strict-peer-deps] [--prefer-dedupe] [--no-package-lock] [--foreground-scripts]
[--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run]
[-w|--workspace [-w|--workspace ...]]
[-ws|--workspaces] [--include-workspace-root] [--install-links]
@@ -2789,6 +3225,7 @@ aliases: add, i, in, ins, inst, insta, instal, isnt, isnta, isntal, isntall
#### \`global-style\`
#### \`omit\`
#### \`strict-peer-deps\`
+#### \`prefer-dedupe\`
#### \`package-lock\`
#### \`foreground-scripts\`
#### \`ignore-scripts\`
@@ -2811,8 +3248,8 @@ npm install-ci-test
Options:
[--install-strategy ] [--legacy-bundling]
[--global-style] [--omit [--omit ...]]
-[--strict-peer-deps] [--no-package-lock] [--foreground-scripts]
-[--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run]
+[--strict-peer-deps] [--foreground-scripts] [--ignore-scripts] [--no-audit]
+[--no-bin-links] [--no-fund] [--dry-run]
[-w|--workspace [-w|--workspace ...]]
[-ws|--workspaces] [--include-workspace-root] [--install-links]
@@ -2831,7 +3268,6 @@ aliases: cit, clean-install-test, sit
#### \`global-style\`
#### \`omit\`
#### \`strict-peer-deps\`
-#### \`package-lock\`
#### \`foreground-scripts\`
#### \`ignore-scripts\`
#### \`audit\`
@@ -2855,7 +3291,7 @@ Options:
[-E|--save-exact] [-g|--global]
[--install-strategy ] [--legacy-bundling]
[--global-style] [--omit [--omit ...]]
-[--strict-peer-deps] [--no-package-lock] [--foreground-scripts]
+[--strict-peer-deps] [--prefer-dedupe] [--no-package-lock] [--foreground-scripts]
[--ignore-scripts] [--no-audit] [--no-bin-links] [--no-fund] [--dry-run]
[-w|--workspace [-w|--workspace ...]]
[-ws|--workspaces] [--include-workspace-root] [--install-links]
@@ -2878,6 +3314,7 @@ alias: it
#### \`global-style\`
#### \`omit\`
#### \`strict-peer-deps\`
+#### \`prefer-dedupe\`
#### \`package-lock\`
#### \`foreground-scripts\`
#### \`ignore-scripts\`
@@ -3328,7 +3765,8 @@ npm publish
Options:
[--tag ] [--access ] [--dry-run] [--otp ]
[-w|--workspace [-w|--workspace ...]]
-[-ws|--workspaces] [--include-workspace-root] [--provenance]
+[-ws|--workspaces] [--include-workspace-root]
+[--provenance|--provenance-file ]
Run "npm help publish" for more info
@@ -3344,6 +3782,7 @@ npm publish
#### \`workspaces\`
#### \`include-workspace-root\`
#### \`provenance\`
+#### \`provenance-file\`
`
exports[`test/lib/docs.js TAP usage query > must match snapshot 1`] = `
@@ -3535,6 +3974,9 @@ Set a value in the npm configuration
Usage:
npm set = [= ...] (See \`npm config\`)
+Options:
+[-g|--global] [-L|--location ]
+
Run "npm help set" for more info
\`\`\`bash
@@ -3543,7 +3985,8 @@ npm set = [= ...] (See \`npm config\`)
Note: This command is unaware of workspaces.
-NO PARAMS
+#### \`global\`
+#### \`location\`
`
exports[`test/lib/docs.js TAP usage shrinkwrap > must match snapshot 1`] = `
@@ -3733,6 +4176,7 @@ npm uninstall [<@scope>/]...
Options:
[-S|--save|--no-save|--save-prod|--save-dev|--save-optional|--save-peer|--save-bundle]
+[-g|--global]
[-w|--workspace [-w|--workspace ...]]
[-ws|--workspaces] [--include-workspace-root] [--install-links]
@@ -3747,6 +4191,7 @@ aliases: unlink, remove, rm, r, un
\`\`\`
#### \`save\`
+#### \`global\`
#### \`workspace\`
#### \`workspaces\`
#### \`include-workspace-root\`
diff --git a/deps/npm/tap-snapshots/test/lib/utils/config/definition.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/config/definition.js.test.cjs
index ad506ae8e3585c..bf4dc30a041f77 100644
--- a/deps/npm/tap-snapshots/test/lib/utils/config/definition.js.test.cjs
+++ b/deps/npm/tap-snapshots/test/lib/utils/config/definition.js.test.cjs
@@ -15,6 +15,8 @@ exports[`test/lib/utils/config/definition.js TAP basic definition > description
it should not be used ever
not even once.
+
+
`
exports[`test/lib/utils/config/definition.js TAP basic definition > human-readable description 1`] = `
@@ -24,6 +26,8 @@ exports[`test/lib/utils/config/definition.js TAP basic definition > human-readab
* Type: Number or String
just a test thingie
+
+
`
exports[`test/lib/utils/config/definition.js TAP long description > cols=-1 1`] = `
@@ -93,6 +97,7 @@ with (multiple) {
}
\`\`\`
+
`
exports[`test/lib/utils/config/definition.js TAP long description > cols=0 1`] = `
@@ -162,6 +167,7 @@ with (multiple) {
}
\`\`\`
+
`
exports[`test/lib/utils/config/definition.js TAP long description > cols=40 1`] = `
@@ -201,6 +207,7 @@ with (multiple) {
}
\`\`\`
+
`
exports[`test/lib/utils/config/definition.js TAP long description > cols=9000 1`] = `
@@ -231,6 +238,7 @@ with (multiple) {
}
\`\`\`
+
`
exports[`test/lib/utils/config/definition.js TAP long description > cols=NaN 1`] = `
@@ -261,4 +269,5 @@ with (multiple) {
}
\`\`\`
+
`
diff --git a/deps/npm/tap-snapshots/test/lib/utils/explain-dep.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/explain-dep.js.test.cjs
index 8550617eb0a00a..876cc6552b7605 100644
--- a/deps/npm/tap-snapshots/test/lib/utils/explain-dep.js.test.cjs
+++ b/deps/npm/tap-snapshots/test/lib/utils/explain-dep.js.test.cjs
@@ -5,7 +5,7 @@
* Make sure to inspect the output below. Do not ignore changes!
*/
'use strict'
-exports[`test/lib/utils/explain-dep.js TAP > ellipses test one 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic > ellipses test one 1`] = `
manydep@1.0.0
manydep@"1.0.0" from prod-dep@1.2.3
node_modules/prod-dep
@@ -13,7 +13,7 @@ manydep@1.0.0
7 more (optdep, extra-neos, deep-dev, peer, the root project, ...)
`
-exports[`test/lib/utils/explain-dep.js TAP > ellipses test two 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic > ellipses test two 1`] = `
manydep@1.0.0
manydep@"1.0.0" from prod-dep@1.2.3
node_modules/prod-dep
@@ -21,29 +21,29 @@ manydep@1.0.0
6 more (optdep, extra-neos, deep-dev, peer, the root project, a package with a pretty long name)
`
-exports[`test/lib/utils/explain-dep.js TAP bundled > explain color deep 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic bundled > explain color deep 1`] = `
[1mbundle-of-joy[22m@[1m1.0.0[22m [1m[34mbundled[39m[22m[2m[22m
[2mnode_modules/bundle-of-joy[22m
[34mbundled[39m [1mprod-dep[22m@"[1m1.x[22m" from the root project
`
-exports[`test/lib/utils/explain-dep.js TAP bundled > explain nocolor shallow 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic bundled > explain nocolor shallow 1`] = `
bundle-of-joy@1.0.0 bundled
node_modules/bundle-of-joy
bundled prod-dep@"1.x" from the root project
`
-exports[`test/lib/utils/explain-dep.js TAP bundled > print color 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic bundled > print color 1`] = `
[1mbundle-of-joy[22m@[1m1.0.0[22m [1m[34mbundled[39m[22m[2m[22m
[2mnode_modules/bundle-of-joy[22m
`
-exports[`test/lib/utils/explain-dep.js TAP bundled > print nocolor 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic bundled > print nocolor 1`] = `
bundle-of-joy@1.0.0 bundled
node_modules/bundle-of-joy
`
-exports[`test/lib/utils/explain-dep.js TAP deepDev > explain color deep 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic deepDev > explain color deep 1`] = `
[1mdeep-dev[22m@[1m2.3.4[22m [1m[33mdev[39m[22m[2m[22m
[2mnode_modules/deep-dev[22m
[1mdeep-dev[22m@"[1m2.x[22m" from [1mmetadev[22m@[1m3.4.5[22m[2m[22m
@@ -53,7 +53,7 @@ exports[`test/lib/utils/explain-dep.js TAP deepDev > explain color deep 1`] = `
[33mdev[39m [1mtopdev[22m@"[1m4.x[22m" from the root project
`
-exports[`test/lib/utils/explain-dep.js TAP deepDev > explain nocolor shallow 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic deepDev > explain nocolor shallow 1`] = `
deep-dev@2.3.4 dev
node_modules/deep-dev
deep-dev@"2.x" from metadev@3.4.5
@@ -62,37 +62,37 @@ node_modules/deep-dev
node_modules/topdev
`
-exports[`test/lib/utils/explain-dep.js TAP deepDev > print color 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic deepDev > print color 1`] = `
[1mdeep-dev[22m@[1m2.3.4[22m [1m[33mdev[39m[22m[2m[22m
[2mnode_modules/deep-dev[22m
`
-exports[`test/lib/utils/explain-dep.js TAP deepDev > print nocolor 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic deepDev > print nocolor 1`] = `
deep-dev@2.3.4 dev
node_modules/deep-dev
`
-exports[`test/lib/utils/explain-dep.js TAP extraneous > explain color deep 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic extraneous > explain color deep 1`] = `
[1mextra-neos[22m@[1m1337.420.69-lol[22m [1m[31mextraneous[39m[22m[2m[22m
[2mnode_modules/extra-neos[22m
`
-exports[`test/lib/utils/explain-dep.js TAP extraneous > explain nocolor shallow 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic extraneous > explain nocolor shallow 1`] = `
extra-neos@1337.420.69-lol extraneous
node_modules/extra-neos
`
-exports[`test/lib/utils/explain-dep.js TAP extraneous > print color 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic extraneous > print color 1`] = `
[1mextra-neos[22m@[1m1337.420.69-lol[22m [1m[31mextraneous[39m[22m[2m[22m
[2mnode_modules/extra-neos[22m
`
-exports[`test/lib/utils/explain-dep.js TAP extraneous > print nocolor 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic extraneous > print nocolor 1`] = `
extra-neos@1337.420.69-lol extraneous
node_modules/extra-neos
`
-exports[`test/lib/utils/explain-dep.js TAP manyDeps > explain color deep 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic manyDeps > explain color deep 1`] = `
[1mmanydep[22m@[1m1.0.0[22m
[1mmanydep[22m@"[1m1.0.0[22m" from [1mprod-dep[22m@[1m1.2.3[22m[2m[22m
[2mnode_modules/prod-dep[22m
@@ -118,7 +118,7 @@ exports[`test/lib/utils/explain-dep.js TAP manyDeps > explain color deep 1`] = `
[1mmanydep[22m@"[1m1[22m" from [1myet another a package with a pretty long name[22m@[1m1.2.3[22m
`
-exports[`test/lib/utils/explain-dep.js TAP manyDeps > explain nocolor shallow 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic manyDeps > explain nocolor shallow 1`] = `
manydep@1.0.0
manydep@"1.0.0" from prod-dep@1.2.3
node_modules/prod-dep
@@ -126,103 +126,103 @@ manydep@1.0.0
8 more (optdep, extra-neos, deep-dev, peer, the root project, ...)
`
-exports[`test/lib/utils/explain-dep.js TAP manyDeps > print color 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic manyDeps > print color 1`] = `
[1mmanydep[22m@[1m1.0.0[22m
`
-exports[`test/lib/utils/explain-dep.js TAP manyDeps > print nocolor 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic manyDeps > print nocolor 1`] = `
manydep@1.0.0
`
-exports[`test/lib/utils/explain-dep.js TAP optional > explain color deep 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic optional > explain color deep 1`] = `
[1moptdep[22m@[1m1.0.0[22m [1m[36moptional[39m[22m[2m[22m
[2mnode_modules/optdep[22m
[36moptional[39m [1moptdep[22m@"[1m1.0.0[22m" from the root project
`
-exports[`test/lib/utils/explain-dep.js TAP optional > explain nocolor shallow 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic optional > explain nocolor shallow 1`] = `
optdep@1.0.0 optional
node_modules/optdep
optional optdep@"1.0.0" from the root project
`
-exports[`test/lib/utils/explain-dep.js TAP optional > print color 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic optional > print color 1`] = `
[1moptdep[22m@[1m1.0.0[22m [1m[36moptional[39m[22m[2m[22m
[2mnode_modules/optdep[22m
`
-exports[`test/lib/utils/explain-dep.js TAP optional > print nocolor 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic optional > print nocolor 1`] = `
optdep@1.0.0 optional
node_modules/optdep
`
-exports[`test/lib/utils/explain-dep.js TAP overridden > explain color deep 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic overridden > explain color deep 1`] = `
[1moverridden-root[22m@[1m1.0.0[22m [1m[90moverridden[39m[22m[2m[22m
[2mnode_modules/overridden-root[22m
[90moverridden[39m [1moverridden-dep[22m@"[1m1.0.0[22m" (was "^2.0.0") from the root project
`
-exports[`test/lib/utils/explain-dep.js TAP overridden > explain nocolor shallow 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic overridden > explain nocolor shallow 1`] = `
overridden-root@1.0.0 overridden
node_modules/overridden-root
overridden overridden-dep@"1.0.0" (was "^2.0.0") from the root project
`
-exports[`test/lib/utils/explain-dep.js TAP overridden > print color 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic overridden > print color 1`] = `
[1moverridden-root[22m@[1m1.0.0[22m [1m[90moverridden[39m[22m[2m[22m
[2mnode_modules/overridden-root[22m
`
-exports[`test/lib/utils/explain-dep.js TAP overridden > print nocolor 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic overridden > print nocolor 1`] = `
overridden-root@1.0.0 overridden
node_modules/overridden-root
`
-exports[`test/lib/utils/explain-dep.js TAP peer > explain color deep 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic peer > explain color deep 1`] = `
[1mpeer[22m@[1m1.0.0[22m [1m[35mpeer[39m[22m[2m[22m
[2mnode_modules/peer[22m
[35mpeer[39m [1mpeer[22m@"[1m1.0.0[22m" from the root project
`
-exports[`test/lib/utils/explain-dep.js TAP peer > explain nocolor shallow 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic peer > explain nocolor shallow 1`] = `
peer@1.0.0 peer
node_modules/peer
peer peer@"1.0.0" from the root project
`
-exports[`test/lib/utils/explain-dep.js TAP peer > print color 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic peer > print color 1`] = `
[1mpeer[22m@[1m1.0.0[22m [1m[35mpeer[39m[22m[2m[22m
[2mnode_modules/peer[22m
`
-exports[`test/lib/utils/explain-dep.js TAP peer > print nocolor 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic peer > print nocolor 1`] = `
peer@1.0.0 peer
node_modules/peer
`
-exports[`test/lib/utils/explain-dep.js TAP prodDep > explain color deep 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic prodDep > explain color deep 1`] = `
[1mprod-dep[22m@[1m1.2.3[22m[2m[22m
[2mnode_modules/prod-dep[22m
[1mprod-dep[22m@"[1m1.x[22m" from the root project
`
-exports[`test/lib/utils/explain-dep.js TAP prodDep > explain nocolor shallow 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic prodDep > explain nocolor shallow 1`] = `
prod-dep@1.2.3
node_modules/prod-dep
prod-dep@"1.x" from the root project
`
-exports[`test/lib/utils/explain-dep.js TAP prodDep > print color 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic prodDep > print color 1`] = `
[1mprod-dep[22m@[1m1.2.3[22m[2m[22m
[2mnode_modules/prod-dep[22m
`
-exports[`test/lib/utils/explain-dep.js TAP prodDep > print nocolor 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic prodDep > print nocolor 1`] = `
prod-dep@1.2.3
node_modules/prod-dep
`
-exports[`test/lib/utils/explain-dep.js TAP workspaces > explain color deep 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic workspaces > explain color deep 1`] = `
[32ma@1.0.0[39m[2m[22m
[2ma[22m
[32ma@1.0.0[39m[2m[22m
@@ -230,7 +230,7 @@ exports[`test/lib/utils/explain-dep.js TAP workspaces > explain color deep 1`] =
[32mworkspace[39m [1ma[22m from the root project
`
-exports[`test/lib/utils/explain-dep.js TAP workspaces > explain nocolor shallow 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic workspaces > explain nocolor shallow 1`] = `
a@1.0.0
a
a@1.0.0
@@ -238,12 +238,12 @@ a
workspace a from the root project
`
-exports[`test/lib/utils/explain-dep.js TAP workspaces > print color 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic workspaces > print color 1`] = `
[32ma@1.0.0[39m[2m[22m
[2ma[22m
`
-exports[`test/lib/utils/explain-dep.js TAP workspaces > print nocolor 1`] = `
+exports[`test/lib/utils/explain-dep.js TAP basic workspaces > print nocolor 1`] = `
a@1.0.0
a
`
diff --git a/deps/npm/tap-snapshots/test/lib/utils/explain-eresolve.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/explain-eresolve.js.test.cjs
index 99ad5c0f31e900..3d73019d3e45b9 100644
--- a/deps/npm/tap-snapshots/test/lib/utils/explain-eresolve.js.test.cjs
+++ b/deps/npm/tap-snapshots/test/lib/utils/explain-eresolve.js.test.cjs
@@ -5,7 +5,7 @@
* Make sure to inspect the output below. Do not ignore changes!
*/
'use strict'
-exports[`test/lib/utils/explain-eresolve.js TAP chain-conflict > explain with color, depth of 2 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic chain-conflict > explain with color, depth of 2 1`] = `
While resolving: [1mproject[22m@[1m1.2.3[22m
Found: [1m@isaacs/testing-peer-dep-conflict-chain-d[22m@[1m2.0.0[22m[2m[22m
[2mnode_modules/@isaacs/testing-peer-dep-conflict-chain-d[22m
@@ -17,7 +17,7 @@ Could not resolve dependency:
[1m@isaacs/testing-peer-dep-conflict-chain-c[22m@"[1m1[22m" from the root project
`
-exports[`test/lib/utils/explain-eresolve.js TAP chain-conflict > explain with no color, depth of 6 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic chain-conflict > explain with no color, depth of 6 1`] = `
While resolving: project@1.2.3
Found: @isaacs/testing-peer-dep-conflict-chain-d@2.0.0
node_modules/@isaacs/testing-peer-dep-conflict-chain-d
@@ -29,7 +29,7 @@ node_modules/@isaacs/testing-peer-dep-conflict-chain-c
@isaacs/testing-peer-dep-conflict-chain-c@"1" from the root project
`
-exports[`test/lib/utils/explain-eresolve.js TAP chain-conflict > report from color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic chain-conflict > report from color 1`] = `
# npm resolution error report
While resolving: project@1.2.3
@@ -47,7 +47,7 @@ this command with --force or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP chain-conflict > report with color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic chain-conflict > report with color 1`] = `
While resolving: [1mproject[22m@[1m1.2.3[22m
Found: [1m@isaacs/testing-peer-dep-conflict-chain-d[22m@[1m2.0.0[22m[2m[22m
[2mnode_modules/@isaacs/testing-peer-dep-conflict-chain-d[22m
@@ -63,7 +63,7 @@ this command with --force or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP chain-conflict > report with no color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic chain-conflict > report with no color 1`] = `
While resolving: project@1.2.3
Found: @isaacs/testing-peer-dep-conflict-chain-d@2.0.0
node_modules/@isaacs/testing-peer-dep-conflict-chain-d
@@ -79,7 +79,7 @@ this command with --force or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP cycleNested > explain with color, depth of 2 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic cycleNested > explain with color, depth of 2 1`] = `
Found: [1m@isaacs/peer-dep-cycle-c[22m@[1m2.0.0[22m[2m[22m
[2mnode_modules/@isaacs/peer-dep-cycle-c[22m
[1m@isaacs/peer-dep-cycle-c[22m@"[1m2.x[22m" from the root project
@@ -97,7 +97,7 @@ Conflicting peer dependency: [1m@isaacs/peer-dep-cycle-c[22m@[1m1.0.0[22m[2
[2mnode_modules/@isaacs/peer-dep-cycle-a[22m
`
-exports[`test/lib/utils/explain-eresolve.js TAP cycleNested > explain with no color, depth of 6 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic cycleNested > explain with no color, depth of 6 1`] = `
Found: @isaacs/peer-dep-cycle-c@2.0.0
node_modules/@isaacs/peer-dep-cycle-c
@isaacs/peer-dep-cycle-c@"2.x" from the root project
@@ -116,7 +116,7 @@ node_modules/@isaacs/peer-dep-cycle-c
@isaacs/peer-dep-cycle-a@"1.x" from the root project
`
-exports[`test/lib/utils/explain-eresolve.js TAP cycleNested > report from color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic cycleNested > report from color 1`] = `
# npm resolution error report
Found: @isaacs/peer-dep-cycle-c@2.0.0
@@ -141,7 +141,7 @@ this command with --no-strict-peer-deps, --force, or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP cycleNested > report with color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic cycleNested > report with color 1`] = `
Found: [1m@isaacs/peer-dep-cycle-c[22m@[1m2.0.0[22m[2m[22m
[2mnode_modules/@isaacs/peer-dep-cycle-c[22m
[1m@isaacs/peer-dep-cycle-c[22m@"[1m2.x[22m" from the root project
@@ -164,7 +164,7 @@ this command with --no-strict-peer-deps, --force, or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP cycleNested > report with no color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic cycleNested > report with no color 1`] = `
Found: @isaacs/peer-dep-cycle-c@2.0.0
node_modules/@isaacs/peer-dep-cycle-c
@isaacs/peer-dep-cycle-c@"2.x" from the root project
@@ -187,7 +187,7 @@ this command with --no-strict-peer-deps, --force, or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP eslint-plugin case > explain with color, depth of 2 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic eslint-plugin case > explain with color, depth of 2 1`] = `
While resolving: [1meslint-plugin-react[22m@[1m7.24.0[22m
Found: [1meslint[22m@[1m6.8.0[22m[2m[22m
[2mnode_modules/eslint[22m
@@ -204,7 +204,7 @@ Conflicting peer dependency: [1meslint[22m@[1m7.31.0[22m[2m[22m
[33mdev[39m [1meslint-plugin-eslint-plugin[22m@"[1m^3.1.0[22m" from the root project
`
-exports[`test/lib/utils/explain-eresolve.js TAP eslint-plugin case > explain with no color, depth of 6 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic eslint-plugin case > explain with no color, depth of 6 1`] = `
While resolving: eslint-plugin-react@7.24.0
Found: eslint@6.8.0
node_modules/eslint
@@ -227,7 +227,7 @@ node_modules/eslint
dev eslint-plugin-eslint-plugin@"^3.1.0" from the root project
`
-exports[`test/lib/utils/explain-eresolve.js TAP eslint-plugin case > report from color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic eslint-plugin case > report from color 1`] = `
# npm resolution error report
While resolving: eslint-plugin-react@7.24.0
@@ -261,7 +261,7 @@ this command with --force or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP eslint-plugin case > report with color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic eslint-plugin case > report with color 1`] = `
While resolving: [1meslint-plugin-react[22m@[1m7.24.0[22m
Found: [1meslint[22m@[1m6.8.0[22m[2m[22m
[2mnode_modules/eslint[22m
@@ -285,7 +285,7 @@ this command with --force or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP eslint-plugin case > report with no color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic eslint-plugin case > report with no color 1`] = `
While resolving: eslint-plugin-react@7.24.0
Found: eslint@6.8.0
node_modules/eslint
@@ -309,7 +309,7 @@ this command with --force or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP gatsby > explain with color, depth of 2 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic gatsby > explain with color, depth of 2 1`] = `
While resolving: [1mgatsby-recipes[22m@[1m0.2.31[22m
Found: [1mink[22m@[1m3.0.0-7[22m[2m[22m
[2mnode_modules/ink[22m
@@ -325,7 +325,7 @@ Could not resolve dependency:
[2mnode_modules/gatsby-recipes[22m
`
-exports[`test/lib/utils/explain-eresolve.js TAP gatsby > explain with no color, depth of 6 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic gatsby > explain with no color, depth of 6 1`] = `
While resolving: gatsby-recipes@0.2.31
Found: ink@3.0.0-7
node_modules/ink
@@ -349,7 +349,7 @@ node_modules/ink-box
gatsby@"" from the root project
`
-exports[`test/lib/utils/explain-eresolve.js TAP gatsby > report from color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic gatsby > report from color 1`] = `
# npm resolution error report
While resolving: gatsby-recipes@0.2.31
@@ -379,7 +379,7 @@ this command with --no-strict-peer-deps, --force, or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP gatsby > report with color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic gatsby > report with color 1`] = `
While resolving: [1mgatsby-recipes[22m@[1m0.2.31[22m
Found: [1mink[22m@[1m3.0.0-7[22m[2m[22m
[2mnode_modules/ink[22m
@@ -406,7 +406,7 @@ this command with --no-strict-peer-deps, --force, or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP gatsby > report with no color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic gatsby > report with no color 1`] = `
While resolving: gatsby-recipes@0.2.31
Found: ink@3.0.0-7
node_modules/ink
@@ -433,7 +433,7 @@ this command with --no-strict-peer-deps, --force, or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP no current node, but has current edge > explain with color, depth of 2 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic no current node, but has current edge > explain with color, depth of 2 1`] = `
While resolving: [1meslint[22m@[1m7.22.0[22m
Found: [33mdev[39m [1meslint[22m@"[1mfile:.[22m" from the root project
@@ -443,7 +443,7 @@ Could not resolve dependency:
[33mdev[39m [1meslint-plugin-jsdoc[22m@"[1m^22.1.0[22m" from the root project
`
-exports[`test/lib/utils/explain-eresolve.js TAP no current node, but has current edge > explain with no color, depth of 6 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic no current node, but has current edge > explain with no color, depth of 6 1`] = `
While resolving: eslint@7.22.0
Found: dev eslint@"file:." from the root project
@@ -453,7 +453,7 @@ node_modules/eslint-plugin-jsdoc
dev eslint-plugin-jsdoc@"^22.1.0" from the root project
`
-exports[`test/lib/utils/explain-eresolve.js TAP no current node, but has current edge > report from color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic no current node, but has current edge > report from color 1`] = `
# npm resolution error report
While resolving: eslint@7.22.0
@@ -469,7 +469,7 @@ this command with --force or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP no current node, but has current edge > report with color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic no current node, but has current edge > report with color 1`] = `
While resolving: [1meslint[22m@[1m7.22.0[22m
Found: [33mdev[39m [1meslint[22m@"[1mfile:.[22m" from the root project
@@ -483,7 +483,7 @@ this command with --force or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP no current node, but has current edge > report with no color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic no current node, but has current edge > report with no color 1`] = `
While resolving: eslint@7.22.0
Found: dev eslint@"file:." from the root project
@@ -497,7 +497,7 @@ this command with --force or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP no current node, no current edge, idk > explain with color, depth of 2 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic no current node, no current edge, idk > explain with color, depth of 2 1`] = `
While resolving: [1meslint[22m@[1m7.22.0[22m
Found: [35mpeer[39m [1meslint[22m@"[1m^6.0.0[22m" from [1meslint-plugin-jsdoc[22m@[1m22.2.0[22m[2m[22m
[2mnode_modules/eslint-plugin-jsdoc[22m
@@ -509,7 +509,7 @@ Could not resolve dependency:
[33mdev[39m [1meslint-plugin-jsdoc[22m@"[1m^22.1.0[22m" from the root project
`
-exports[`test/lib/utils/explain-eresolve.js TAP no current node, no current edge, idk > explain with no color, depth of 6 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic no current node, no current edge, idk > explain with no color, depth of 6 1`] = `
While resolving: eslint@7.22.0
Found: peer eslint@"^6.0.0" from eslint-plugin-jsdoc@22.2.0
node_modules/eslint-plugin-jsdoc
@@ -521,7 +521,7 @@ node_modules/eslint-plugin-jsdoc
dev eslint-plugin-jsdoc@"^22.1.0" from the root project
`
-exports[`test/lib/utils/explain-eresolve.js TAP no current node, no current edge, idk > report from color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic no current node, no current edge, idk > report from color 1`] = `
# npm resolution error report
While resolving: eslint@7.22.0
@@ -539,7 +539,7 @@ this command with --force or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP no current node, no current edge, idk > report with color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic no current node, no current edge, idk > report with color 1`] = `
While resolving: [1meslint[22m@[1m7.22.0[22m
Found: [35mpeer[39m [1meslint[22m@"[1m^6.0.0[22m" from [1meslint-plugin-jsdoc[22m@[1m22.2.0[22m[2m[22m
[2mnode_modules/eslint-plugin-jsdoc[22m
@@ -555,7 +555,7 @@ this command with --force or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP no current node, no current edge, idk > report with no color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic no current node, no current edge, idk > report with no color 1`] = `
While resolving: eslint@7.22.0
Found: peer eslint@"^6.0.0" from eslint-plugin-jsdoc@22.2.0
node_modules/eslint-plugin-jsdoc
@@ -571,7 +571,7 @@ this command with --force or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP withShrinkwrap > explain with color, depth of 2 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic withShrinkwrap > explain with color, depth of 2 1`] = `
While resolving: [1m@isaacs/peer-dep-cycle-b[22m@[1m1.0.0[22m
Found: [1m@isaacs/peer-dep-cycle-c[22m@[1m2.0.0[22m[2m[22m
[2mnode_modules/@isaacs/peer-dep-cycle-c[22m
@@ -584,7 +584,7 @@ Could not resolve dependency:
[2mnode_modules/@isaacs/peer-dep-cycle-a[22m
`
-exports[`test/lib/utils/explain-eresolve.js TAP withShrinkwrap > explain with no color, depth of 6 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic withShrinkwrap > explain with no color, depth of 6 1`] = `
While resolving: @isaacs/peer-dep-cycle-b@1.0.0
Found: @isaacs/peer-dep-cycle-c@2.0.0
node_modules/@isaacs/peer-dep-cycle-c
@@ -598,7 +598,7 @@ node_modules/@isaacs/peer-dep-cycle-b
@isaacs/peer-dep-cycle-a@"1.x" from the root project
`
-exports[`test/lib/utils/explain-eresolve.js TAP withShrinkwrap > report from color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic withShrinkwrap > report from color 1`] = `
# npm resolution error report
While resolving: @isaacs/peer-dep-cycle-b@1.0.0
@@ -618,7 +618,7 @@ this command with --no-strict-peer-deps, --force, or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP withShrinkwrap > report with color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic withShrinkwrap > report with color 1`] = `
While resolving: [1m@isaacs/peer-dep-cycle-b[22m@[1m1.0.0[22m
Found: [1m@isaacs/peer-dep-cycle-c[22m@[1m2.0.0[22m[2m[22m
[2mnode_modules/@isaacs/peer-dep-cycle-c[22m
@@ -636,7 +636,7 @@ this command with --no-strict-peer-deps, --force, or --legacy-peer-deps
to accept an incorrect (and potentially broken) dependency resolution.
`
-exports[`test/lib/utils/explain-eresolve.js TAP withShrinkwrap > report with no color 1`] = `
+exports[`test/lib/utils/explain-eresolve.js TAP basic withShrinkwrap > report with no color 1`] = `
While resolving: @isaacs/peer-dep-cycle-b@1.0.0
Found: @isaacs/peer-dep-cycle-c@2.0.0
node_modules/@isaacs/peer-dep-cycle-c
diff --git a/deps/npm/tap-snapshots/test/lib/utils/open-url-prompt.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/open-url-prompt.js.test.cjs
index 8af3c475c7720c..968b14a20d90f5 100644
--- a/deps/npm/tap-snapshots/test/lib/utils/open-url-prompt.js.test.cjs
+++ b/deps/npm/tap-snapshots/test/lib/utils/open-url-prompt.js.test.cjs
@@ -6,20 +6,10 @@
*/
'use strict'
exports[`test/lib/utils/open-url-prompt.js TAP opens a url > must match snapshot 1`] = `
-Array [
- Array [
- String(
- npm home:
- https://www.npmjs.com
- ),
- ],
-]
+npm home:
+https://www.npmjs.com
`
exports[`test/lib/utils/open-url-prompt.js TAP prints json output > must match snapshot 1`] = `
-Array [
- Array [
- "{\\"title\\":\\"npm home\\",\\"url\\":\\"https://www.npmjs.com\\"}",
- ],
-]
+{"title":"npm home","url":"https://www.npmjs.com"}
`
diff --git a/deps/npm/tap-snapshots/test/lib/utils/reify-finish.js.test.cjs b/deps/npm/tap-snapshots/test/lib/utils/reify-finish.js.test.cjs
deleted file mode 100644
index a82905a399679a..00000000000000
--- a/deps/npm/tap-snapshots/test/lib/utils/reify-finish.js.test.cjs
+++ /dev/null
@@ -1,15 +0,0 @@
-/* IMPORTANT
- * This snapshot file is auto-generated, but designed for humans.
- * It should be checked into source control and tracked carefully.
- * Re-generate by setting TAP_SNAPSHOT=1 and running tests.
- * Make sure to inspect the output below. Do not ignore changes!
- */
-'use strict'
-exports[`test/lib/utils/reify-finish.js TAP should write if everything above passes > written config 1`] = `
-hasBuiltinConfig=true
-x=y
-
-[nested]
-foo=bar
-
-`
diff --git a/deps/npm/test/bin/npx-cli.js b/deps/npm/test/bin/npx-cli.js
index 5670f24f07b77a..67a8d3319fc18d 100644
--- a/deps/npm/test/bin/npx-cli.js
+++ b/deps/npm/test/bin/npx-cli.js
@@ -1,5 +1,5 @@
const t = require('tap')
-const mockGlobals = require('../fixtures/mock-globals')
+const mockGlobals = require('@npmcli/mock-globals')
const tmock = require('../fixtures/tmock')
const npm = require.resolve('../../bin/npm-cli.js')
diff --git a/deps/npm/test/fixtures/libnpmsearch-stream-result.js b/deps/npm/test/fixtures/libnpmsearch-stream-result.js
index b2ec20f59efebc..1ec8b7b113d6b8 100644
--- a/deps/npm/test/fixtures/libnpmsearch-stream-result.js
+++ b/deps/npm/test/fixtures/libnpmsearch-stream-result.js
@@ -216,7 +216,7 @@ module.exports = [
version: '1.0.1',
description: 'Retrieves a name:pathname Map for a given workspaces config',
keywords: [
- 'npm',
+ '\x1B[33mnpm\x1B[39m',
'npmcli',
'libnpm',
'cli',
@@ -240,10 +240,10 @@ module.exports = [
],
},
{
- name: 'libnpmversion',
+ name: '\x1B[31mlibnpmversion\x1B[39m',
scope: 'unscoped',
version: '1.0.7',
- description: "library to do the things that 'npm version' does",
+ description: "library to do the things that '\x1B[32mnpm version\x1B[39m' does",
date: '2020-11-04T00:21:41.069Z',
links: {
npm: 'https://www.npmjs.com/package/libnpmversion',
@@ -259,7 +259,7 @@ module.exports = [
},
publisher: { username: 'isaacs', email: 'i@izs.me' },
maintainers: [
- { username: 'nlf', email: 'quitlahok@gmail.com' },
+ { username: '\x1B[34mnlf\x1B[39m', email: 'quitlahok@gmail.com' },
{ username: 'ruyadorno', email: 'ruyadorno@hotmail.com' },
{ username: 'darcyclarke', email: 'darcy@darcyclarke.me' },
{ username: 'isaacs', email: 'i@izs.me' },
diff --git a/deps/npm/test/fixtures/mock-globals.js b/deps/npm/test/fixtures/mock-globals.js
deleted file mode 100644
index aec8a83963687a..00000000000000
--- a/deps/npm/test/fixtures/mock-globals.js
+++ /dev/null
@@ -1,236 +0,0 @@
-// An initial implementation for a feature that will hopefully exist in tap
-// https://github.com/tapjs/node-tap/issues/789
-// This file is only used in tests but it is still tested itself.
-// Hopefully it can be removed for a feature in tap in the future
-
-const sep = '.'
-const escapeSep = '"'
-const has = (o, k) => Object.prototype.hasOwnProperty.call(o, k)
-const opd = (o, k) => Object.getOwnPropertyDescriptor(o, k)
-const po = (o) => Object.getPrototypeOf(o)
-const pojo = (o) => Object.prototype.toString.call(o) === '[object Object]'
-const last = (arr) => arr[arr.length - 1]
-const dupes = (arr) => arr.filter((k, i) => arr.indexOf(k) !== i)
-const dupesStartsWith = (arr) => arr.filter((k1) => arr.some((k2) => k2.startsWith(k1 + sep)))
-
-const splitLastSep = (str) => {
- let escaped = false
- for (let i = str.length - 1; i >= 0; i--) {
- const c = str[i]
- const cp = str[i + 1]
- const cn = str[i - 1]
- if (!escaped && c === escapeSep && (cp == null || cp === sep)) {
- escaped = true
- continue
- }
- if (escaped && c === escapeSep && cn === sep) {
- escaped = false
- continue
- }
- if (!escaped && c === sep) {
- return [
- str.slice(0, i),
- str.slice(i + 1).replace(new RegExp(`^${escapeSep}(.*)${escapeSep}$`), '$1'),
- ]
- }
- }
- return [str]
-}
-
-// A weird getter that can look up keys on nested objects but also
-// match keys with dots in their names, eg { 'process.env': { TERM: 'a' } }
-// can be looked up with the key 'process.env.TERM'
-const get = (obj, key, childKey = '') => {
- if (has(obj, key)) {
- return childKey ? get(obj[key], childKey) : obj[key]
- }
- const split = splitLastSep(key)
- if (split.length === 2) {
- const [parentKey, prefix] = split
- return get(
- obj,
- parentKey,
- prefix + (childKey && sep + childKey)
- )
- }
-}
-
-// Map an object to an array of nested keys separated by dots
-// { a: 1, b: { c: 2, d: [1] } } => ['a', 'b.c', 'b.d']
-const getKeys = (values, p = '', acc = []) =>
- Object.entries(values).reduce((memo, [k, value]) => {
- const key = p ? [p, k].join(sep) : k
- return pojo(value) ? getKeys(value, key, memo) : memo.concat(key)
- }, acc)
-
-// Walk prototype chain to get first available descriptor. This is necessary
-// to get the current property descriptor for things like `process.on`.
-// Since `opd(process, 'on') === undefined` but if you
-// walk up the prototype chain you get the original descriptor
-// `opd(po(po(process)), 'on') === { value, ... }`
-const protoDescriptor = (obj, key) => {
- let descriptor
- // i always wanted to assign variables in a while loop's condition
- // i thought it would feel better than this
- while (!(descriptor = opd(obj, key))) {
- if (!(obj = po(obj))) {
- break
- }
- }
- return descriptor
-}
-
-// Path can be different cases across platform so get the original case
-// of the path before anything is changed
-// XXX: other special cases to handle?
-const specialCaseKeys = (() => {
- const originalKeys = {
- PATH: process.env.PATH ? 'PATH' : process.env.Path ? 'Path' : 'path',
- }
- return (key) => {
- switch (key.toLowerCase()) {
- case 'process.env.path':
- return originalKeys.PATH
- }
- }
-})()
-
-const _setGlobal = Symbol('setGlobal')
-const _nextDescriptor = Symbol('nextDescriptor')
-
-class DescriptorStack {
- #stack = []
- #global = null
- #valueKey = null
- #defaultDescriptor = { configurable: true, writable: true, enumerable: true }
- #delete = () => ({ DELETE: true })
- #isDelete = (o) => o && o.DELETE === true
-
- constructor (key) {
- const keys = splitLastSep(key)
- this.#global = keys.length === 1 ? global : get(global, keys[0])
- this.#valueKey = specialCaseKeys(key) || last(keys)
- // If the global object doesnt return a descriptor for the key
- // then we mark it for deletion on teardown
- this.#stack = [
- protoDescriptor(this.#global, this.#valueKey) || this.#delete(),
- ]
- }
-
- add (value) {
- // This must be a unique object so we can find it later via indexOf
- // That's why delete/nextDescriptor create new objects
- const nextDescriptor = this[_nextDescriptor](value)
- this.#stack.push(this[_setGlobal](nextDescriptor))
-
- return () => {
- const index = this.#stack.indexOf(nextDescriptor)
- // If the stack doesnt contain the descriptor anymore
- // than do nothing. This keeps the reset function indempotent
- if (index > -1) {
- // Resetting removes a descriptor from the stack
- this.#stack.splice(index, 1)
- // But we always reset to what is now the most recent in case
- // resets are being called manually out of order
- this[_setGlobal](last(this.#stack))
- }
- }
- }
-
- reset () {
- // Everything could be reset manually so only
- // teardown if we have an initial descriptor left
- // and then delete the rest of the stack
- if (this.#stack.length) {
- this[_setGlobal](this.#stack[0])
- this.#stack.length = 0
- }
- }
-
- [_setGlobal] (d) {
- if (this.#isDelete(d)) {
- delete this.#global[this.#valueKey]
- } else {
- Object.defineProperty(this.#global, this.#valueKey, d)
- }
- return d
- }
-
- [_nextDescriptor] (value) {
- if (value === undefined) {
- return this.#delete()
- }
- const d = last(this.#stack)
- return {
- // If the previous descriptor was one to delete the property
- // then use the default descriptor as the base
- ...(this.#isDelete(d) ? this.#defaultDescriptor : d),
- ...(d && d.get ? { get: () => value } : { value }),
- }
- }
-}
-
-class MockGlobals {
- #descriptors = {}
-
- register (globals, { replace = false } = {}) {
- // Replace means dont merge in object values but replace them instead
- // so we only get top level keys instead of walking the obj
- const keys = replace ? Object.keys(globals) : getKeys(globals)
-
- // An error state where due to object mode there are multiple global
- // values to be set with the same key
- const duplicates = dupes(keys)
- if (duplicates.length) {
- throw new Error(`mockGlobals was called with duplicate keys: ${duplicates}`)
- }
-
- // Another error where when in replace mode overlapping keys are set like
- // process and process.stdout which would cause unexpected behavior
- const overlapping = dupesStartsWith(keys)
- if (overlapping.length) {
- const message = overlapping
- .map((k) => `${k} -> ${keys.filter((kk) => kk.startsWith(k + sep))}`)
- throw new Error(`mockGlobals was called with overlapping keys: ${message}`)
- }
-
- // Set each property passed in and return fns to reset them
- // Return an object with each path as a key for manually resetting in each test
- return keys.reduce((acc, key) => {
- const desc = this.#descriptors[key] || (this.#descriptors[key] = new DescriptorStack(key))
- acc[key] = desc.add(get(globals, key))
- return acc
- }, {})
- }
-
- teardown (key) {
- if (!key) {
- Object.values(this.#descriptors).forEach((d) => d.reset())
- return
- }
- this.#descriptors[key].reset()
- }
-}
-
-// Each test has one instance of MockGlobals so it can be called multiple times per test
-// Its a weak map so that it can be garbage collected along with the tap tests without
-// needing to explicitly call cache.delete
-const cache = new WeakMap()
-
-module.exports = (t, globals, options) => {
- let instance = cache.get(t)
- if (!instance) {
- instance = cache.set(t, new MockGlobals()).get(t)
- // Teardown only needs to be initialized once. The instance
- // will keep track of its own state during the test
- t.teardown(() => instance.teardown())
- }
-
- return {
- // Reset contains only the functions to reset the globals
- // set by this function call
- reset: instance.register(globals, options),
- // Teardown will reset across all calls tied to this test
- teardown: () => instance.teardown(),
- }
-}
diff --git a/deps/npm/test/fixtures/mock-npm.js b/deps/npm/test/fixtures/mock-npm.js
index a2d35c2479d733..b91ee8a3933a39 100644
--- a/deps/npm/test/fixtures/mock-npm.js
+++ b/deps/npm/test/fixtures/mock-npm.js
@@ -4,7 +4,7 @@ const path = require('path')
const tap = require('tap')
const errorMessage = require('../../lib/utils/error-message')
const mockLogs = require('./mock-logs')
-const mockGlobals = require('./mock-globals')
+const mockGlobals = require('@npmcli/mock-globals')
const tmock = require('./tmock')
const defExitCode = process.exitCode
@@ -113,6 +113,7 @@ const setupMockNpm = async (t, {
// preload a command
command = null, // string name of the command
exec = null, // optionally exec the command before returning
+ setCmd = false,
// test dirs
prefixDir = {},
homeDir = {},
@@ -212,7 +213,7 @@ const setupMockNpm = async (t, {
return acc
}, { argv: [...rawArgv], env: {}, config: {} })
- mockGlobals(t, {
+ const mockedGlobals = mockGlobals(t, {
'process.env.HOME': dirs.home,
// global prefix cannot be (easily) set via argv so this is the easiest way
// to set it that also closely mimics the behavior a user would see since it
@@ -251,16 +252,25 @@ const setupMockNpm = async (t, {
const mockCommand = {}
if (command) {
- const cmd = await npm.cmd(command)
- const usage = await cmd.usage
- mockCommand.cmd = cmd
+ const Cmd = mockNpm.Npm.cmd(command)
+ if (setCmd) {
+ // XXX(hack): This is a hack to allow fake-ish tests to set the currently
+ // running npm command without running exec. Generally, we should rely on
+ // actually exec-ing the command to asserting the state of the world
+ // through what is printed/on disk/etc. This is a stop-gap to allow tests
+ // that are time intensive to convert to continue setting the npm command
+ // this way. TODO: remove setCmd from all tests and remove the setCmd
+ // method from `lib/npm.js`
+ npm.setCmd(command)
+ }
+ mockCommand.cmd = new Cmd(npm)
mockCommand[command] = {
- usage,
+ usage: Cmd.describeUsage,
exec: (args) => npm.exec(command, args),
- completion: (args) => cmd.completion(args),
+ completion: (args) => Cmd.completion(args, npm),
}
if (exec) {
- await mockCommand[command].exec(exec)
+ await mockCommand[command].exec(exec === true ? [] : exec)
// assign string output to the command now that we have it
// for easier testing
mockCommand[command].output = mockNpm.joinedOutput()
@@ -269,6 +279,7 @@ const setupMockNpm = async (t, {
return {
npm,
+ mockedGlobals,
...mockNpm,
...dirs,
...mockCommand,
diff --git a/deps/npm/test/fixtures/sandbox.js b/deps/npm/test/fixtures/sandbox.js
index 460609628c8abb..01a5e562fd9b26 100644
--- a/deps/npm/test/fixtures/sandbox.js
+++ b/deps/npm/test/fixtures/sandbox.js
@@ -328,8 +328,8 @@ class Sandbox extends EventEmitter {
this[_npm].output = (...args) => this[_output].push(args)
await this[_npm].load()
- const impl = await this[_npm].cmd(command)
- return impl.completion({
+ const Cmd = Npm.cmd(command)
+ return Cmd.completion({
partialWord: partial,
conf: {
argv: {
diff --git a/deps/npm/test/lib/cli-entry.js b/deps/npm/test/lib/cli-entry.js
new file mode 100644
index 00000000000000..b436304c30047a
--- /dev/null
+++ b/deps/npm/test/lib/cli-entry.js
@@ -0,0 +1,168 @@
+const t = require('tap')
+const { load: loadMockNpm } = require('../fixtures/mock-npm.js')
+const tmock = require('../fixtures/tmock.js')
+const validateEngines = require('../../lib/es6/validate-engines.js')
+
+const cliMock = async (t, opts) => {
+ let exitHandlerArgs = null
+ let npm = null
+ const exitHandlerMock = (...args) => {
+ exitHandlerArgs = args
+ npm.unload()
+ }
+ exitHandlerMock.setNpm = _npm => npm = _npm
+
+ const { Npm, outputs, logMocks, logs } = await loadMockNpm(t, { ...opts, init: false })
+ const cli = tmock(t, '{LIB}/cli-entry.js', {
+ '{LIB}/npm.js': Npm,
+ '{LIB}/utils/exit-handler.js': exitHandlerMock,
+ ...logMocks,
+ })
+
+ return {
+ Npm,
+ cli: (p) => validateEngines(p, () => cli),
+ outputs,
+ exitHandlerCalled: () => exitHandlerArgs,
+ exitHandlerNpm: () => npm,
+ logs,
+ logsBy: (title) => logs.verbose.filter(([p]) => p === title).map(([p, ...rest]) => rest),
+ }
+}
+
+t.test('print the version, and treat npm_g as npm -g', async t => {
+ const { logsBy, logs, cli, Npm, outputs, exitHandlerCalled } = await cliMock(t, {
+ globals: { 'process.argv': ['node', 'npm_g', '-v'] },
+ })
+ await cli(process)
+
+ t.strictSame(process.argv, ['node', 'npm', '-g', '-v'], 'system process.argv was rewritten')
+ t.strictSame(logsBy('cli'), [['node npm']])
+ t.strictSame(logsBy('title'), [['npm']])
+ t.match(logsBy('argv'), [['"--global" "--version"']])
+ t.strictSame(logs.info, [
+ ['using', 'npm@%s', Npm.version],
+ ['using', 'node@%s', process.version],
+ ])
+ t.equal(outputs.length, 1)
+ t.strictSame(outputs, [[Npm.version]])
+ t.strictSame(exitHandlerCalled(), [])
+})
+
+t.test('calling with --versions calls npm version with no args', async t => {
+ const { logsBy, cli, outputs, exitHandlerCalled } = await cliMock(t, {
+ globals: {
+ 'process.argv': ['node', 'npm', 'install', 'or', 'whatever', '--versions'],
+ },
+ })
+ await cli(process)
+
+ t.equal(process.title, 'npm install or whatever')
+ t.strictSame(logsBy('cli'), [['node npm']])
+ t.strictSame(logsBy('title'), [['npm install or whatever']])
+ t.match(logsBy('argv'), [['"install" "or" "whatever" "--versions"']])
+ t.equal(outputs.length, 1)
+ t.match(outputs[0][0], { npm: String, node: String, v8: String })
+ t.strictSame(exitHandlerCalled(), [])
+})
+
+t.test('logged argv is sanitized', async t => {
+ const { logsBy, cli } = await cliMock(t, {
+ globals: {
+ 'process.argv': [
+ 'node',
+ 'npm',
+ 'version',
+ '--registry',
+ 'https://u:password@npmjs.org/password',
+ ],
+ },
+ })
+
+ await cli(process)
+ t.equal(process.title, 'npm version')
+ t.strictSame(logsBy('cli'), [['node npm']])
+ t.strictSame(logsBy('title'), [['npm version']])
+ t.match(logsBy('argv'), [['"version" "--registry" "https://u:***@npmjs.org/password"']])
+})
+
+t.test('logged argv is sanitized with equals', async t => {
+ const { logsBy, cli } = await cliMock(t, {
+ globals: {
+ 'process.argv': [
+ 'node',
+ 'npm',
+ 'version',
+ '--registry=https://u:password@npmjs.org',
+ ],
+ },
+ })
+ await cli(process)
+
+ t.match(logsBy('argv'), [['"version" "--registry" "https://u:***@npmjs.org/"']])
+})
+
+t.test('print usage if no params provided', async t => {
+ const { cli, outputs, exitHandlerCalled, exitHandlerNpm } = await cliMock(t, {
+ globals: {
+ 'process.argv': ['node', 'npm'],
+ },
+ })
+ await cli(process)
+
+ t.match(outputs[0][0], 'Usage:', 'outputs npm usage')
+ t.match(exitHandlerCalled(), [], 'should call exitHandler with no args')
+ t.ok(exitHandlerNpm(), 'exitHandler npm is set')
+ t.match(process.exitCode, 1)
+})
+
+t.test('print usage if non-command param provided', async t => {
+ const { cli, outputs, exitHandlerCalled, exitHandlerNpm } = await cliMock(t, {
+ globals: {
+ 'process.argv': ['node', 'npm', 'tset'],
+ },
+ })
+ await cli(process)
+
+ t.match(outputs[0][0], 'Unknown command: "tset"')
+ t.match(outputs[0][0], 'Did you mean this?')
+ t.match(exitHandlerCalled(), [], 'should call exitHandler with no args')
+ t.ok(exitHandlerNpm(), 'exitHandler npm is set')
+ t.match(process.exitCode, 1)
+})
+
+t.test('load error calls error handler', async t => {
+ const err = new Error('test load error')
+ const { cli, exitHandlerCalled } = await cliMock(t, {
+ mocks: {
+ '{LIB}/utils/config/index.js': {
+ definitions: null,
+ flatten: null,
+ shorthands: null,
+ },
+ '@npmcli/config': class BadConfig {
+ async load () {
+ throw err
+ }
+ },
+ },
+ globals: {
+ 'process.argv': ['node', 'npm', 'asdf'],
+ },
+ })
+ await cli(process)
+ t.strictSame(exitHandlerCalled(), [err])
+})
+
+t.test('unsupported node version', async t => {
+ const { cli, logs } = await cliMock(t, {
+ globals: {
+ 'process.version': '12.6.0',
+ },
+ })
+ await cli(process)
+ t.match(
+ logs.warn[0][1],
+ /npm v.* does not support Node\.js 12\.6\.0\./
+ )
+})
diff --git a/deps/npm/test/lib/cli.js b/deps/npm/test/lib/cli.js
index cafd13feabe93d..a6cb576e886ee9 100644
--- a/deps/npm/test/lib/cli.js
+++ b/deps/npm/test/lib/cli.js
@@ -1,167 +1,10 @@
const t = require('tap')
-const { load: loadMockNpm } = require('../fixtures/mock-npm.js')
const tmock = require('../fixtures/tmock')
-const cliMock = async (t, opts) => {
- let exitHandlerArgs = null
- let npm = null
- const exitHandlerMock = (...args) => {
- exitHandlerArgs = args
- npm.unload()
- }
- exitHandlerMock.setNpm = _npm => npm = _npm
-
- const { Npm, outputs, logMocks, logs } = await loadMockNpm(t, { ...opts, init: false })
+t.test('returns cli-entry function', async t => {
const cli = tmock(t, '{LIB}/cli.js', {
- '{LIB}/npm.js': Npm,
- '{LIB}/utils/exit-handler.js': exitHandlerMock,
- ...logMocks,
- })
-
- return {
- Npm,
- cli,
- outputs,
- exitHandlerCalled: () => exitHandlerArgs,
- exitHandlerNpm: () => npm,
- logs,
- logsBy: (title) => logs.verbose.filter(([p]) => p === title).map(([p, ...rest]) => rest),
- }
-}
-
-t.test('print the version, and treat npm_g as npm -g', async t => {
- const { logsBy, logs, cli, Npm, outputs, exitHandlerCalled } = await cliMock(t, {
- globals: { 'process.argv': ['node', 'npm_g', '-v'] },
- })
- await cli(process)
-
- t.strictSame(process.argv, ['node', 'npm', '-g', '-v'], 'system process.argv was rewritten')
- t.strictSame(logsBy('cli'), [['node npm']])
- t.strictSame(logsBy('title'), [['npm']])
- t.match(logsBy('argv'), [['"--global" "--version"']])
- t.strictSame(logs.info, [
- ['using', 'npm@%s', Npm.version],
- ['using', 'node@%s', process.version],
- ])
- t.equal(outputs.length, 1)
- t.strictSame(outputs, [[Npm.version]])
- t.strictSame(exitHandlerCalled(), [])
-})
-
-t.test('calling with --versions calls npm version with no args', async t => {
- const { logsBy, cli, outputs, exitHandlerCalled } = await cliMock(t, {
- globals: {
- 'process.argv': ['node', 'npm', 'install', 'or', 'whatever', '--versions'],
- },
- })
- await cli(process)
-
- t.equal(process.title, 'npm install or whatever')
- t.strictSame(logsBy('cli'), [['node npm']])
- t.strictSame(logsBy('title'), [['npm install or whatever']])
- t.match(logsBy('argv'), [['"install" "or" "whatever" "--versions"']])
- t.equal(outputs.length, 1)
- t.match(outputs[0][0], { npm: String, node: String, v8: String })
- t.strictSame(exitHandlerCalled(), [])
-})
-
-t.test('logged argv is sanitized', async t => {
- const { logsBy, cli } = await cliMock(t, {
- globals: {
- 'process.argv': [
- 'node',
- 'npm',
- 'version',
- '--registry',
- 'https://u:password@npmjs.org/password',
- ],
- },
- })
-
- await cli(process)
- t.equal(process.title, 'npm version')
- t.strictSame(logsBy('cli'), [['node npm']])
- t.strictSame(logsBy('title'), [['npm version']])
- t.match(logsBy('argv'), [['"version" "--registry" "https://u:***@npmjs.org/password"']])
-})
-
-t.test('logged argv is sanitized with equals', async t => {
- const { logsBy, cli } = await cliMock(t, {
- globals: {
- 'process.argv': [
- 'node',
- 'npm',
- 'version',
- '--registry=https://u:password@npmjs.org',
- ],
- },
- })
- await cli(process)
-
- t.match(logsBy('argv'), [['"version" "--registry" "https://u:***@npmjs.org/"']])
-})
-
-t.test('print usage if no params provided', async t => {
- const { cli, outputs, exitHandlerCalled, exitHandlerNpm } = await cliMock(t, {
- globals: {
- 'process.argv': ['node', 'npm'],
- },
+ '{LIB}/cli-entry.js': () => 'ENTRY',
})
- await cli(process)
-
- t.match(outputs[0][0], 'Usage:', 'outputs npm usage')
- t.match(exitHandlerCalled(), [], 'should call exitHandler with no args')
- t.ok(exitHandlerNpm(), 'exitHandler npm is set')
- t.match(process.exitCode, 1)
-})
-t.test('print usage if non-command param provided', async t => {
- const { cli, outputs, exitHandlerCalled, exitHandlerNpm } = await cliMock(t, {
- globals: {
- 'process.argv': ['node', 'npm', 'tset'],
- },
- })
- await cli(process)
-
- t.match(outputs[0][0], 'Unknown command: "tset"')
- t.match(outputs[0][0], 'Did you mean this?')
- t.match(exitHandlerCalled(), [], 'should call exitHandler with no args')
- t.ok(exitHandlerNpm(), 'exitHandler npm is set')
- t.match(process.exitCode, 1)
-})
-
-t.test('load error calls error handler', async t => {
- const err = new Error('test load error')
- const { cli, exitHandlerCalled } = await cliMock(t, {
- mocks: {
- '{LIB}/utils/config/index.js': {
- definitions: null,
- flatten: null,
- shorthands: null,
- },
- '@npmcli/config': class BadConfig {
- async load () {
- throw err
- }
- },
- },
- globals: {
- 'process.argv': ['node', 'npm', 'asdf'],
- },
- })
- await cli(process)
- t.strictSame(exitHandlerCalled(), [err])
-})
-
-t.test('unsupported node version', async t => {
- const { cli, logs } = await cliMock(t, {
- globals: {
- 'process.version': '12.6.0',
- },
- })
- await cli(process)
- t.match(
- logs.warn[0][1],
- /npm v.* does not support Node\.js 12\.6\.0\./
- )
+ t.equal(cli(process), 'ENTRY')
})
diff --git a/deps/npm/test/lib/commands/access.js b/deps/npm/test/lib/commands/access.js
index d1839aaaef2199..7aec33701297ca 100644
--- a/deps/npm/test/lib/commands/access.js
+++ b/deps/npm/test/lib/commands/access.js
@@ -7,8 +7,7 @@ const token = 'test-auth-token'
const auth = { '//registry.npmjs.org/:_authToken': 'test-auth-token' }
t.test('completion', async t => {
- const { npm } = await loadMockNpm(t)
- const access = await npm.cmd('access')
+ const { access } = await loadMockNpm(t, { command: 'access' })
const testComp = (argv, expect) => {
const res = access.completion({ conf: { argv: { remain: argv } } })
t.resolves(res, expect, argv.join(' '))
diff --git a/deps/npm/test/lib/commands/adduser.js b/deps/npm/test/lib/commands/adduser.js
index ddfbb945b2fcf5..410e8c4987ca64 100644
--- a/deps/npm/test/lib/commands/adduser.js
+++ b/deps/npm/test/lib/commands/adduser.js
@@ -4,27 +4,47 @@ const path = require('path')
const ini = require('ini')
const { load: loadMockNpm } = require('../../fixtures/mock-npm.js')
-const mockGlobals = require('../../fixtures/mock-globals.js')
+const mockGlobals = require('@npmcli/mock-globals')
const MockRegistry = require('@npmcli/mock-registry')
const stream = require('stream')
+const mockAddUser = async (t, { stdin: stdinLines, registry: registryUrl, ...options } = {}) => {
+ let stdin
+ if (stdinLines) {
+ stdin = new stream.PassThrough()
+ for (const l of stdinLines) {
+ stdin.write(l + '\n')
+ }
+ mockGlobals(t, {
+ 'process.stdin': stdin,
+ 'process.stdout': new stream.PassThrough(), // to quiet readline
+ }, { replace: true })
+ }
+ const mock = await loadMockNpm(t, {
+ ...options,
+ command: 'adduser',
+ })
+ const registry = new MockRegistry({
+ tap: t,
+ registry: registryUrl ?? mock.npm.config.get('registry'),
+ })
+ return {
+ registry,
+ stdin,
+ rc: () => ini.parse(fs.readFileSync(path.join(mock.home, '.npmrc'), 'utf8')),
+ ...mock,
+ }
+}
+
t.test('usage', async t => {
- const { npm } = await loadMockNpm(t)
- const adduser = await npm.cmd('adduser')
+ const { adduser } = await loadMockNpm(t, { command: 'adduser' })
t.match(adduser.usage, 'adduser', 'usage has command name in it')
})
t.test('legacy', async t => {
t.test('simple adduser', async t => {
- const stdin = new stream.PassThrough()
- stdin.write('test-user\n')
- stdin.write('test-password\n')
- stdin.write('test-email@npmjs.org\n')
- mockGlobals(t, {
- 'process.stdin': stdin,
- 'process.stdout': new stream.PassThrough(), // to quiet readline
- }, { replace: true })
- const { npm, home } = await loadMockNpm(t, {
+ const { npm, rc, registry, adduser } = await mockAddUser(t, {
+ stdin: ['test-user', 'test-password', 'test-email@npmjs.org'],
config: { 'auth-type': 'legacy' },
homeDir: {
'.npmrc': [
@@ -34,71 +54,48 @@ t.test('legacy', async t => {
].join('\n'),
},
})
- const registry = new MockRegistry({
- tap: t,
- registry: npm.config.get('registry'),
- })
registry.couchadduser({
username: 'test-user',
password: 'test-password',
email: 'test-email@npmjs.org',
token: 'npm_test-token',
})
- await npm.exec('adduser', [])
+ await adduser.exec([])
t.same(npm.config.get('email'), 'test-email-old@npmjs.org')
t.same(npm.config.get('//registry.npmjs.org/:_authToken'), 'npm_test-token')
- const rc = ini.parse(fs.readFileSync(path.join(home, '.npmrc'), 'utf8'))
- t.same(rc, {
+ t.same(rc(), {
'//registry.npmjs.org/:_authToken': 'npm_test-token',
email: 'test-email-old@npmjs.org',
}, 'should only have token and un-nerfed old email')
})
t.test('scoped adduser', async t => {
- const stdin = new stream.PassThrough()
- stdin.write('test-user\n')
- stdin.write('test-password\n')
- stdin.write('test-email@npmjs.org\n')
- mockGlobals(t, {
- 'process.stdin': stdin,
- 'process.stdout': new stream.PassThrough(), // to quiet readline
- }, { replace: true })
- const { npm, home } = await loadMockNpm(t, {
+ const { npm, rc, registry, adduser } = await mockAddUser(t, {
+ stdin: ['test-user', 'test-password', 'test-email@npmjs.org'],
config: {
'auth-type': 'legacy',
scope: '@myscope',
},
})
- const registry = new MockRegistry({
- tap: t,
- registry: npm.config.get('registry'),
- })
registry.couchadduser({
username: 'test-user',
password: 'test-password',
email: 'test-email@npmjs.org',
token: 'npm_test-token',
})
- await npm.exec('adduser', [])
+ await adduser.exec([])
t.same(npm.config.get('//registry.npmjs.org/:_authToken'), 'npm_test-token')
t.same(npm.config.get('@myscope:registry'), 'https://registry.npmjs.org/')
- const rc = ini.parse(fs.readFileSync(path.join(home, '.npmrc'), 'utf8'))
- t.same(rc, {
+ t.same(rc(), {
'//registry.npmjs.org/:_authToken': 'npm_test-token',
'@myscope:registry': 'https://registry.npmjs.org/',
}, 'should only have token and scope:registry')
})
t.test('scoped adduser with valid scoped registry config', async t => {
- const stdin = new stream.PassThrough()
- stdin.write('test-user\n')
- stdin.write('test-password\n')
- stdin.write('test-email@npmjs.org\n')
- mockGlobals(t, {
- 'process.stdin': stdin,
- 'process.stdout': new stream.PassThrough(), // to quiet readline
- }, { replace: true })
- const { npm, home } = await loadMockNpm(t, {
+ const { npm, rc, registry, adduser } = await mockAddUser(t, {
+ stdin: ['test-user', 'test-password', 'test-email@npmjs.org'],
+ registry: 'https://diff-registry.npmjs.org',
homeDir: {
'.npmrc': '@myscope:registry=https://diff-registry.npmjs.org',
},
@@ -107,106 +104,70 @@ t.test('legacy', async t => {
scope: '@myscope',
},
})
- const registry = new MockRegistry({
- tap: t,
- registry: 'https://diff-registry.npmjs.org',
- })
registry.couchadduser({
username: 'test-user',
password: 'test-password',
email: 'test-email@npmjs.org',
token: 'npm_test-token',
})
- await npm.exec('adduser', [])
+ await adduser.exec([])
t.same(npm.config.get('//diff-registry.npmjs.org/:_authToken'), 'npm_test-token')
t.same(npm.config.get('@myscope:registry'), 'https://diff-registry.npmjs.org')
- const rc = ini.parse(fs.readFileSync(path.join(home, '.npmrc'), 'utf8'))
- t.same(rc, {
+ t.same(rc(), {
'@myscope:registry': 'https://diff-registry.npmjs.org',
'//diff-registry.npmjs.org/:_authToken': 'npm_test-token',
}, 'should only have token and scope:registry')
})
t.test('save config failure', async t => {
- const stdin = new stream.PassThrough()
- stdin.write('test-user\n')
- stdin.write('test-password\n')
- stdin.write('test-email@npmjs.org\n')
- mockGlobals(t, {
- 'process.stdin': stdin,
- 'process.stdout': new stream.PassThrough(), // to quiet readline
- }, { replace: true })
- const { npm } = await loadMockNpm(t, {
+ const { registry, adduser } = await mockAddUser(t, {
+ stdin: ['test-user', 'test-password', 'test-email@npmjs.org'],
config: { 'auth-type': 'legacy' },
homeDir: {
'.npmrc': {},
},
})
- const registry = new MockRegistry({
- tap: t,
- registry: npm.config.get('registry'),
- })
registry.couchadduser({
username: 'test-user',
password: 'test-password',
email: 'test-email@npmjs.org',
token: 'npm_test-token',
})
- await t.rejects(npm.exec('adduser', []))
+ await t.rejects(adduser.exec([]))
})
t.end()
})
t.test('web', t => {
t.test('basic adduser', async t => {
- const { npm, home } = await loadMockNpm(t, {
+ const { npm, rc, registry, adduser } = await mockAddUser(t, {
config: { 'auth-type': 'web' },
})
- const registry = new MockRegistry({
- tap: t,
- registry: npm.config.get('registry'),
- })
registry.webadduser({ token: 'npm_test-token' })
- await npm.exec('adduser', [])
+ await adduser.exec([])
t.same(npm.config.get('//registry.npmjs.org/:_authToken'), 'npm_test-token')
- const rc = ini.parse(fs.readFileSync(path.join(home, '.npmrc'), 'utf8'))
- t.same(rc, {
+ t.same(rc(), {
'//registry.npmjs.org/:_authToken': 'npm_test-token',
})
})
t.test('server error', async t => {
- const { npm } = await loadMockNpm(t, {
+ const { adduser, registry } = await mockAddUser(t, {
config: { 'auth-type': 'web' },
})
- const registry = new MockRegistry({
- tap: t,
- registry: npm.config.get('registry'),
- })
registry.nock.post(registry.fullPath('/-/v1/login'))
.reply(503, {})
await t.rejects(
- npm.exec('adduser', []),
+ adduser.exec([]),
{ message: /503/ }
)
})
t.test('fallback', async t => {
- const stdin = new stream.PassThrough()
- stdin.write('test-user\n')
- stdin.write('test-password\n')
- stdin.write('test-email@npmjs.org\n')
- mockGlobals(t, {
- 'process.stdin': stdin,
- 'process.stdout': new stream.PassThrough(), // to quiet readline
- }, { replace: true })
- const { npm } = await loadMockNpm(t, {
+ const { npm, registry, adduser } = await mockAddUser(t, {
+ stdin: ['test-user', 'test-password', 'test-email@npmjs.org'],
config: { 'auth-type': 'web' },
})
- const registry = new MockRegistry({
- tap: t,
- registry: npm.config.get('registry'),
- })
registry.nock.post(registry.fullPath('/-/v1/login'))
.reply(404, {})
registry.couchadduser({
@@ -215,7 +176,7 @@ t.test('web', t => {
email: 'test-email@npmjs.org',
token: 'npm_test-token',
})
- await npm.exec('adduser', [])
+ await adduser.exec([])
t.same(npm.config.get('//registry.npmjs.org/:_authToken'), 'npm_test-token')
})
t.end()
diff --git a/deps/npm/test/lib/commands/audit.js b/deps/npm/test/lib/commands/audit.js
index 5c82fa14de32c1..4014e733873519 100644
--- a/deps/npm/test/lib/commands/audit.js
+++ b/deps/npm/test/lib/commands/audit.js
@@ -3,6 +3,7 @@ const zlib = require('zlib')
const path = require('path')
const t = require('tap')
+const { default: tufmock } = require('@tufjs/repo-mock')
const { load: loadMockNpm } = require('../../fixtures/mock-npm')
const MockRegistry = require('@npmcli/mock-registry')
@@ -210,8 +211,7 @@ t.test('audit fix - bulk endpoint', async t => {
})
t.test('completion', async t => {
- const { npm } = await loadMockNpm(t)
- const audit = await npm.cmd('audit')
+ const { audit } = await loadMockNpm(t, { command: 'audit' })
t.test('fix', async t => {
await t.resolveMatch(
audit.completion({ conf: { argv: { remain: ['npm', 'audit'] } } }),
@@ -247,28 +247,69 @@ t.test('audit signatures', async t => {
}],
}
- const MISMATCHING_REGISTRY_KEYS = {
+ const TUF_VALID_REGISTRY_KEYS = {
keys: [{
- expires: null,
- keyid: 'SHA256:2l3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA',
- keytype: 'ecdsa-sha2-nistp256',
- scheme: 'ecdsa-sha2-nistp256',
- key: 'MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE1Olb3zMAFFxXKHiIkQO5cJ3Yhl5i6UPp+' +
+ keyId: 'SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA',
+ keyUsage: 'npm:signatures',
+ publicKey: {
+ rawBytes: 'MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE1Olb3zMAFFxXKHiIkQO5cJ3Yhl5i6UPp+' +
'IhuteBJbuHcA5UogKo0EWtlWwW6KSaKoTNEYL7JlCQiVnkhBktUgg==',
+ keyDetails: 'PKIX_ECDSA_P256_SHA_256',
+ validFor: {
+ start: '1999-01-01T00:00:00.000Z',
+ },
+ },
}],
}
- const EXPIRED_REGISTRY_KEYS = {
+ const TUF_MISMATCHING_REGISTRY_KEYS = {
keys: [{
- expires: '2021-01-11T15:45:42.144Z',
- keyid: 'SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA',
- keytype: 'ecdsa-sha2-nistp256',
- scheme: 'ecdsa-sha2-nistp256',
- key: 'MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE1Olb3zMAFFxXKHiIkQO5cJ3Yhl5i6UPp+' +
+ keyId: 'SHA256:2l3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA',
+ keyUsage: 'npm:signatures',
+ publicKey: {
+ rawBytes: 'MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE1Olb3zMAFFxXKHiIkQO5cJ3Yhl5i6UPp+' +
+ 'IhuteBJbuHcA5UogKo0EWtlWwW6KSaKoTNEYL7JlCQiVnkhBktUgg==',
+ keyDetails: 'PKIX_ECDSA_P256_SHA_256',
+ validFor: {
+ start: '1999-01-01T00:00:00.000Z',
+ },
+ },
+ }],
+ }
+
+ const TUF_EXPIRED_REGISTRY_KEYS = {
+ keys: [{
+ keyId: 'SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA',
+ keyUsage: 'npm:signatures',
+ publicKey: {
+ rawBytes: 'MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE1Olb3zMAFFxXKHiIkQO5cJ3Yhl5i6UPp+' +
'IhuteBJbuHcA5UogKo0EWtlWwW6KSaKoTNEYL7JlCQiVnkhBktUgg==',
+ keyDetails: 'PKIX_ECDSA_P256_SHA_256',
+ validFor: {
+ start: '1999-01-01T00:00:00.000Z',
+ end: '2021-01-11T15:45:42.144Z',
+ },
+ },
}],
}
+ const TUF_VALID_KEYS_TARGET = {
+ name: 'registry.npmjs.org/keys.json',
+ content: JSON.stringify(TUF_VALID_REGISTRY_KEYS),
+ }
+
+ const TUF_MISMATCHING_KEYS_TARGET = {
+ name: 'registry.npmjs.org/keys.json',
+ content: JSON.stringify(TUF_MISMATCHING_REGISTRY_KEYS),
+ }
+
+ const TUF_EXPIRED_KEYS_TARGET = {
+ name: 'registry.npmjs.org/keys.json',
+ content: JSON.stringify(TUF_EXPIRED_REGISTRY_KEYS),
+ }
+
+ const TUF_TARGET_NOT_FOUND = []
+
const installWithValidSigs = {
'package.json': JSON.stringify({
name: 'test-dep',
@@ -882,13 +923,22 @@ t.test('audit signatures', async t => {
await registry.package({ manifest })
}
+ function mockTUF ({ target, npm }) {
+ const opts = {
+ baseURL: 'https://tuf-repo-cdn.sigstore.dev',
+ metadataPathPrefix: '',
+ cachePath: path.join(npm.cache, '_tuf'),
+ }
+ return tufmock(target, opts)
+ }
+
t.test('with valid signatures', async t => {
const { npm, joinedOutput } = await loadMockNpm(t, {
prefixDir: installWithValidSigs,
})
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithValidSigs({ registry })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -921,6 +971,22 @@ t.test('audit signatures', async t => {
}],
})
await registry.package({ manifest })
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
+
+ await npm.exec('audit', ['signatures'])
+
+ t.notOk(process.exitCode, 'should exit successfully')
+ t.match(joinedOutput(), /audited 1 package/)
+ t.matchSnapshot(joinedOutput())
+ })
+
+ t.test('with key fallback to legacy API', async t => {
+ const { npm, joinedOutput } = await loadMockNpm(t, {
+ prefixDir: installWithValidSigs,
+ })
+ const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
+ await manifestWithValidSigs({ registry })
+ mockTUF({ npm, target: TUF_TARGET_NOT_FOUND })
registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
await npm.exec('audit', ['signatures'])
@@ -1027,7 +1093,7 @@ t.test('audit signatures', async t => {
})
await registry.package({ manifest: asyncManifest })
await manifestWithInvalidSigs({ registry, name: 'node-fetch', version: '1.6.0' })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1044,7 +1110,7 @@ t.test('audit signatures', async t => {
})
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithValidSigs({ registry })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1059,7 +1125,7 @@ t.test('audit signatures', async t => {
})
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithInvalidSigs({ registry })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1076,7 +1142,7 @@ t.test('audit signatures', async t => {
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithValidSigs({ registry })
await manifestWithoutSigs({ registry, name: 'async', version: '1.1.1' })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1094,7 +1160,7 @@ t.test('audit signatures', async t => {
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithInvalidSigs({ registry })
await manifestWithoutSigs({ registry, name: 'async', version: '1.1.1' })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1112,7 +1178,7 @@ t.test('audit signatures', async t => {
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithInvalidSigs({ registry, name: 'kms-demo', version: '1.0.0' })
await manifestWithInvalidSigs({ registry, name: 'async', version: '1.1.1' })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1127,7 +1193,7 @@ t.test('audit signatures', async t => {
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithoutSigs({ registry, name: 'kms-demo', version: '1.0.0' })
await manifestWithoutSigs({ registry, name: 'async', version: '1.1.1' })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1141,6 +1207,7 @@ t.test('audit signatures', async t => {
})
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithValidSigs({ registry })
+ mockTUF({ npm, target: TUF_TARGET_NOT_FOUND })
registry.nock.get('/-/npm/v1/keys').reply(404)
await t.rejects(
@@ -1156,7 +1223,7 @@ t.test('audit signatures', async t => {
})
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithValidSigs({ registry })
- registry.nock.get('/-/npm/v1/keys').reply(200, EXPIRED_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_EXPIRED_KEYS_TARGET })
await t.rejects(
npm.exec('audit', ['signatures']),
@@ -1171,7 +1238,7 @@ t.test('audit signatures', async t => {
})
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithValidSigs({ registry })
- registry.nock.get('/-/npm/v1/keys').reply(200, MISMATCHING_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_MISMATCHING_KEYS_TARGET })
await t.rejects(
npm.exec('audit', ['signatures']),
@@ -1186,7 +1253,7 @@ t.test('audit signatures', async t => {
})
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithoutSigs({ registry })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1204,7 +1271,7 @@ t.test('audit signatures', async t => {
})
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithoutSigs({ registry })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1225,7 +1292,7 @@ t.test('audit signatures', async t => {
})
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithValidSigs({ registry })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1243,7 +1310,7 @@ t.test('audit signatures', async t => {
})
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithInvalidSigs({ registry })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1261,7 +1328,7 @@ t.test('audit signatures', async t => {
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithInvalidSigs({ registry })
await manifestWithoutSigs({ registry, name: 'async', version: '1.1.1' })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1278,7 +1345,7 @@ t.test('audit signatures', async t => {
})
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithValidSigs({ registry })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1309,6 +1376,7 @@ t.test('audit signatures', async t => {
}],
})
await registry.package({ manifest })
+ mockTUF({ npm, target: TUF_TARGET_NOT_FOUND })
registry.nock.get('/-/npm/v1/keys').reply(404)
await t.rejects(
@@ -1339,6 +1407,7 @@ t.test('audit signatures', async t => {
}],
})
await registry.package({ manifest })
+ mockTUF({ npm, target: TUF_TARGET_NOT_FOUND })
registry.nock.get('/-/npm/v1/keys').reply(400)
await t.rejects(
@@ -1377,17 +1446,11 @@ t.test('audit signatures', async t => {
}],
})
await registry.package({ manifest })
- registry.nock.get('/-/npm/v1/keys')
- .reply(200, {
- keys: [{
- expires: null,
- keyid: 'SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA',
- keytype: 'ecdsa-sha2-nistp256',
- scheme: 'ecdsa-sha2-nistp256',
- key: 'MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE1Olb3zMAFFxXKHiIkQO5cJ3Yhl5i6UPp+' +
- 'IhuteBJbuHcA5UogKo0EWtlWwW6KSaKoTNEYL7JlCQiVnkhBktUgg==',
- }],
- })
+ mockTUF({ npm,
+ target: {
+ name: 'verdaccio-clone.org/keys.json',
+ content: JSON.stringify(TUF_VALID_REGISTRY_KEYS),
+ } })
await npm.exec('audit', ['signatures'])
@@ -1425,17 +1488,11 @@ t.test('audit signatures', async t => {
}],
})
await registry.package({ manifest })
- registry.nock.get('/-/npm/v1/keys')
- .reply(200, {
- keys: [{
- expires: null,
- keyid: 'SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA',
- keytype: 'ecdsa-sha2-nistp256',
- scheme: 'ecdsa-sha2-nistp256',
- key: 'MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE1Olb3zMAFFxXKHiIkQO5cJ3Yhl5i6UPp+' +
- 'IhuteBJbuHcA5UogKo0EWtlWwW6KSaKoTNEYL7JlCQiVnkhBktUgg==',
- }],
- })
+ mockTUF({ npm,
+ target: {
+ name: 'verdaccio-clone.org/keys.json',
+ content: JSON.stringify(TUF_VALID_REGISTRY_KEYS),
+ } })
await npm.exec('audit', ['signatures'])
@@ -1467,17 +1524,11 @@ t.test('audit signatures', async t => {
}],
})
await registry.package({ manifest })
- registry.nock.get('/-/npm/v1/keys')
- .reply(200, {
- keys: [{
- expires: null,
- keyid: 'SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA',
- keytype: 'ecdsa-sha2-nistp256',
- scheme: 'ecdsa-sha2-nistp256',
- key: 'MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE1Olb3zMAFFxXKHiIkQO5cJ3Yhl5i6UPp+' +
- 'IhuteBJbuHcA5UogKo0EWtlWwW6KSaKoTNEYL7JlCQiVnkhBktUgg==',
- }],
- })
+ mockTUF({ npm,
+ target: {
+ name: 'verdaccio-clone.org/keys.json',
+ content: JSON.stringify(TUF_VALID_REGISTRY_KEYS),
+ } })
await npm.exec('audit', ['signatures'])
@@ -1486,6 +1537,94 @@ t.test('audit signatures', async t => {
t.matchSnapshot(joinedOutput())
})
+ t.test('third-party registry with sub-path', async t => {
+ const registryUrl = 'https://verdaccio-clone.org/npm'
+ const { npm, joinedOutput } = await loadMockNpm(t, {
+ prefixDir: installWithThirdPartyRegistry,
+ config: {
+ scope: '@npmcli',
+ registry: registryUrl,
+ },
+ })
+ const registry = new MockRegistry({ tap: t, registry: registryUrl })
+
+ const manifest = registry.manifest({
+ name: '@npmcli/arborist',
+ packuments: [{
+ version: '1.0.14',
+ dist: {
+ tarball: 'https://registry.npmjs.org/@npmcli/arborist/-/@npmcli/arborist-1.0.14.tgz',
+ integrity: 'sha512-caa8hv5rW9VpQKk6tyNRvSaVDySVjo9GkI7Wj/wcsFyxPm3tYrE' +
+ 'sFyTjSnJH8HCIfEGVQNjqqKXaXLFVp7UBag==',
+ signatures: [
+ {
+ keyid: 'SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA',
+ sig: 'MEUCIAvNpR3G0j7WOPUuVMhE0ZdM8PnDNcsoeFD8Iwz9YWIMAiEAn8cicDC2' +
+ 'Sf9MFQydqTv6S5XYsAh9Af1sig1nApNI11M=',
+ },
+ ],
+ },
+ }],
+ })
+ await registry.package({ manifest })
+
+ mockTUF({ npm,
+ target: {
+ name: 'verdaccio-clone.org/npm/keys.json',
+ content: JSON.stringify(TUF_VALID_REGISTRY_KEYS),
+ } })
+
+ await npm.exec('audit', ['signatures'])
+
+ t.notOk(process.exitCode, 'should exit successfully')
+ t.match(joinedOutput(), /audited 1 package/)
+ t.matchSnapshot(joinedOutput())
+ })
+
+ t.test('third-party registry with sub-path (trailing slash)', async t => {
+ const registryUrl = 'https://verdaccio-clone.org/npm/'
+ const { npm, joinedOutput } = await loadMockNpm(t, {
+ prefixDir: installWithThirdPartyRegistry,
+ config: {
+ scope: '@npmcli',
+ registry: registryUrl,
+ },
+ })
+ const registry = new MockRegistry({ tap: t, registry: registryUrl })
+
+ const manifest = registry.manifest({
+ name: '@npmcli/arborist',
+ packuments: [{
+ version: '1.0.14',
+ dist: {
+ tarball: 'https://registry.npmjs.org/@npmcli/arborist/-/@npmcli/arborist-1.0.14.tgz',
+ integrity: 'sha512-caa8hv5rW9VpQKk6tyNRvSaVDySVjo9GkI7Wj/wcsFyxPm3tYrE' +
+ 'sFyTjSnJH8HCIfEGVQNjqqKXaXLFVp7UBag==',
+ signatures: [
+ {
+ keyid: 'SHA256:jl3bwswu80PjjokCgh0o2w5c2U4LhQAE57gj9cz1kzA',
+ sig: 'MEUCIAvNpR3G0j7WOPUuVMhE0ZdM8PnDNcsoeFD8Iwz9YWIMAiEAn8cicDC2' +
+ 'Sf9MFQydqTv6S5XYsAh9Af1sig1nApNI11M=',
+ },
+ ],
+ },
+ }],
+ })
+ await registry.package({ manifest })
+
+ mockTUF({ npm,
+ target: {
+ name: 'verdaccio-clone.org/npm/keys.json',
+ content: JSON.stringify(TUF_VALID_REGISTRY_KEYS),
+ } })
+
+ await npm.exec('audit', ['signatures'])
+
+ t.notOk(process.exitCode, 'should exit successfully')
+ t.match(joinedOutput(), /audited 1 package/)
+ t.matchSnapshot(joinedOutput())
+ })
+
t.test('multiple registries with keys and signatures', async t => {
const registryUrl = 'https://verdaccio-clone.org'
const { npm, joinedOutput } = await loadMockNpm(t, {
@@ -1500,7 +1639,7 @@ t.test('audit signatures', async t => {
registry: registryUrl,
})
await manifestWithValidSigs({ registry })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
const manifest = thirdPartyRegistry.manifest({
name: '@npmcli/arborist',
@@ -1556,11 +1695,36 @@ t.test('audit signatures', async t => {
)
})
+ t.test('errors when TUF errors', async t => {
+ const { npm } = await loadMockNpm(t, {
+ prefixDir: installWithMultipleDeps,
+ mocks: {
+ sigstore: {
+ sigstore: {
+ tuf: {
+ client: async () => ({
+ getTarget: async () => {
+ throw new Error('error refreshing TUF metadata')
+ },
+ }),
+ },
+ },
+ },
+ },
+ })
+
+ await t.rejects(
+ npm.exec('audit', ['signatures']),
+ /error refreshing TUF metadata/
+ )
+ })
+
t.test('errors when the keys endpoint errors', async t => {
const { npm } = await loadMockNpm(t, {
prefixDir: installWithMultipleDeps,
})
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
+ mockTUF({ npm, target: TUF_TARGET_NOT_FOUND })
registry.nock.get('/-/npm/v1/keys')
.reply(500, { error: 'keys broke' })
@@ -1577,7 +1741,7 @@ t.test('audit signatures', async t => {
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithValidSigs({ registry })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1590,8 +1754,7 @@ t.test('audit signatures', async t => {
const { npm } = await loadMockNpm(t, {
prefixDir: noInstall,
})
- const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await t.rejects(
npm.exec('audit', ['signatures']),
@@ -1612,8 +1775,7 @@ t.test('audit signatures', async t => {
node_modules: {},
},
})
- const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await t.rejects(
npm.exec('audit', ['signatures']),
@@ -1641,6 +1803,7 @@ t.test('audit signatures', async t => {
},
},
})
+ mockTUF({ npm, target: TUF_TARGET_NOT_FOUND })
await t.rejects(
npm.exec('audit', ['signatures']),
@@ -1669,8 +1832,7 @@ t.test('audit signatures', async t => {
},
})
- const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await t.rejects(
npm.exec('audit', ['signatures']),
@@ -1697,7 +1859,7 @@ t.test('audit signatures', async t => {
})
const registry = new MockRegistry({ tap: t, registry: npm.config.get('registry') })
await manifestWithInvalidSigs({ registry })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1728,7 +1890,7 @@ t.test('audit signatures', async t => {
'utf8'
)
registry.nock.get('/-/npm/v1/attestations/sigstore@1.0.0').reply(200, fixture)
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1761,7 +1923,7 @@ t.test('audit signatures', async t => {
)
registry.nock.get('/-/npm/v1/attestations/sigstore@1.0.0').reply(200, fixture1)
registry.nock.get('/-/npm/v1/attestations/tuf-js@1.0.0').reply(200, fixture2)
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1791,7 +1953,7 @@ t.test('audit signatures', async t => {
'utf8'
)
registry.nock.get('/-/npm/v1/attestations/sigstore@1.0.0').reply(200, fixture)
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1828,7 +1990,7 @@ t.test('audit signatures', async t => {
'utf8'
)
registry.nock.get('/-/npm/v1/attestations/sigstore@1.0.0').reply(200, fixture)
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1865,7 +2027,7 @@ t.test('audit signatures', async t => {
)
registry.nock.get('/-/npm/v1/attestations/sigstore@1.0.0').reply(200, fixture1)
registry.nock.get('/-/npm/v1/attestations/tuf-js@1.0.0').reply(200, fixture2)
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1922,7 +2084,7 @@ t.test('audit signatures', async t => {
})
await registry.package({ manifest: asyncManifest })
await registry.package({ manifest: lightCycleManifest })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
@@ -1975,7 +2137,7 @@ t.test('audit signatures', async t => {
})
await registry.package({ manifest: asyncManifest })
await registry.package({ manifest: lightCycleManifest })
- registry.nock.get('/-/npm/v1/keys').reply(200, VALID_REGISTRY_KEYS)
+ mockTUF({ npm, target: TUF_VALID_KEYS_TARGET })
await npm.exec('audit', ['signatures'])
diff --git a/deps/npm/test/lib/commands/bugs.js b/deps/npm/test/lib/commands/bugs.js
index bf45b9eee81ab3..953c8e6345a2a7 100644
--- a/deps/npm/test/lib/commands/bugs.js
+++ b/deps/npm/test/lib/commands/bugs.js
@@ -43,8 +43,7 @@ const pacote = {
}
t.test('usage', async (t) => {
- const { npm } = await loadMockNpm(t)
- const bugs = await npm.cmd('bugs')
+ const { bugs } = await loadMockNpm(t, { command: 'bugs' })
t.match(bugs.usage, 'bugs', 'usage has command name in it')
})
diff --git a/deps/npm/test/lib/commands/cache.js b/deps/npm/test/lib/commands/cache.js
index fe2854f9aa6269..15ee4dc80aa1aa 100644
--- a/deps/npm/test/lib/commands/cache.js
+++ b/deps/npm/test/lib/commands/cache.js
@@ -1,7 +1,6 @@
const t = require('tap')
const { load: loadMockNpm } = require('../../fixtures/mock-npm.js')
const MockRegistry = require('@npmcli/mock-registry')
-const mockGlobals = require('../../fixtures/mock-globals')
const cacache = require('cacache')
const fs = require('fs')
@@ -267,8 +266,9 @@ t.test('cache verify', async t => {
})
t.test('cache verify as part of home', async t => {
- const { npm, joinedOutput, prefix } = await loadMockNpm(t)
- mockGlobals(t, { 'process.env.HOME': path.dirname(prefix) })
+ const { npm, joinedOutput } = await loadMockNpm(t, {
+ globals: ({ prefix }) => ({ 'process.env.HOME': path.dirname(prefix) }),
+ })
await npm.exec('cache', ['verify'])
t.match(joinedOutput(), 'Cache verified and compressed (~', 'contains ~ shorthand')
})
@@ -302,8 +302,7 @@ t.test('cache verify w/ extra output', async t => {
})
t.test('cache completion', async t => {
- const { npm } = await loadMockNpm(t)
- const cache = await npm.cmd('cache')
+ const { cache } = await loadMockNpm(t, { command: 'cache' })
const { completion } = cache
const testComp = (argv, expect) => {
diff --git a/deps/npm/test/lib/commands/completion.js b/deps/npm/test/lib/commands/completion.js
index 6cc1677552e8a7..904d9410fdd6c7 100644
--- a/deps/npm/test/lib/commands/completion.js
+++ b/deps/npm/test/lib/commands/completion.js
@@ -1,30 +1,26 @@
const t = require('tap')
const fs = require('fs')
const path = require('path')
+const { load: loadMockNpm } = require('../../fixtures/mock-npm')
const completionScript = fs
.readFileSync(path.resolve(__dirname, '../../../lib/utils/completion.sh'), { encoding: 'utf8' })
.replace(/^#!.*?\n/, '')
-const { load: loadMockNpm } = require('../../fixtures/mock-npm')
-const mockGlobals = require('../../fixtures/mock-globals')
-
const loadMockCompletion = async (t, o = {}) => {
const { globals = {}, windows, ...options } = o
- let resetGlobals = {}
- resetGlobals = mockGlobals(t, {
- 'process.platform': windows ? 'win32' : 'posix',
- 'process.env.term': 'notcygwin',
- 'process.env.msystem': 'nogmingw',
- ...globals,
- }).reset
const res = await loadMockNpm(t, {
+ command: 'completion',
...options,
+ globals: (dirs) => ({
+ 'process.platform': windows ? 'win32' : 'posix',
+ 'process.env.term': 'notcygwin',
+ 'process.env.msystem': 'nogmingw',
+ ...(typeof globals === 'function' ? globals(dirs) : globals),
+ }),
})
- const completion = await res.npm.cmd('completion')
return {
- resetGlobals,
- completion,
+ resetGlobals: res.mockedGlobals.reset,
...res,
}
}
@@ -40,21 +36,26 @@ const loadMockCompletionComp = async (t, word, line) =>
t.test('completion', async t => {
t.test('completion completion', async t => {
- const { outputs, completion, prefix } = await loadMockCompletion(t, {
+ const { outputs, completion } = await loadMockCompletion(t, {
prefixDir: {
'.bashrc': 'aaa',
'.zshrc': 'aaa',
},
+ globals: ({ prefix }) => ({
+ 'process.env.HOME': prefix,
+ }),
})
- mockGlobals(t, { 'process.env.HOME': prefix })
await completion.completion({ w: 2 })
t.matchSnapshot(outputs, 'both shells')
})
t.test('completion completion no known shells', async t => {
- const { outputs, completion, prefix } = await loadMockCompletion(t)
- mockGlobals(t, { 'process.env.HOME': prefix })
+ const { outputs, completion } = await loadMockCompletion(t, {
+ globals: ({ prefix }) => ({
+ 'process.env.HOME': prefix,
+ }),
+ })
await completion.completion({ w: 2 })
t.matchSnapshot(outputs, 'no responses')
@@ -86,7 +87,7 @@ t.test('completion', async t => {
},
})
- await completion.exec({})
+ await completion.exec()
t.equal(data, completionScript, 'wrote the completion script')
})
@@ -111,7 +112,7 @@ t.test('completion', async t => {
},
})
- await completion.exec({})
+ await completion.exec()
t.equal(data, completionScript, 'wrote the completion script')
})
@@ -190,7 +191,7 @@ t.test('completion', async t => {
t.test('windows without bash', async t => {
const { outputs, completion } = await loadMockCompletion(t, { windows: true })
await t.rejects(
- completion.exec({}),
+ completion.exec(),
{ code: 'ENOTSUP', message: /completion supported only in MINGW/ },
'returns the correct error'
)
diff --git a/deps/npm/test/lib/commands/deprecate.js b/deps/npm/test/lib/commands/deprecate.js
index 48513c7303a015..4ae146fd3aee0d 100644
--- a/deps/npm/test/lib/commands/deprecate.js
+++ b/deps/npm/test/lib/commands/deprecate.js
@@ -12,13 +12,13 @@ const versions = ['1.0.0', '1.0.1', '1.0.1-pre']
const packages = { foo: 'write', bar: 'write', baz: 'write', buzz: 'read' }
t.test('completion', async t => {
- const { npm } = await loadMockNpm(t, {
+ const { npm, deprecate } = await loadMockNpm(t, {
+ command: 'deprecate',
config: {
...auth,
},
})
- const deprecate = await npm.cmd('deprecate')
const testComp = async (argv, expect) => {
const res =
await deprecate.completion({ conf: { argv: { remain: argv } } })
diff --git a/deps/npm/test/lib/commands/diff.js b/deps/npm/test/lib/commands/diff.js
index d9ff9e5dad0e6a..36a9e4bc17d9f1 100644
--- a/deps/npm/test/lib/commands/diff.js
+++ b/deps/npm/test/lib/commands/diff.js
@@ -25,7 +25,9 @@ const mockDiff = async (t, {
...opts
} = {}) => {
const tarballFixtures = Object.entries(tarballs).reduce((acc, [spec, fixture]) => {
- const [name, version] = spec.split('@')
+ const lastAt = spec.lastIndexOf('@')
+ const name = spec.slice(0, lastAt)
+ const version = spec.slice(lastAt + 1)
acc[name] = acc[name] || {}
acc[name][version] = fixture
if (!acc[name][version]['package.json']) {
@@ -39,6 +41,7 @@ const mockDiff = async (t, {
const { prefixDir, globalPrefixDir, otherDirs, config, ...rest } = opts
const { npm, ...res } = await loadMockNpm(t, {
+ command: 'diff',
prefixDir: jsonifyTestdir(prefixDir),
otherDirs: jsonifyTestdir({ tarballs: tarballFixtures, ...otherDirs }),
globalPrefixDir: jsonifyTestdir(globalPrefixDir),
@@ -75,7 +78,7 @@ const mockDiff = async (t, {
}
if (exec) {
- await npm.exec('diff', exec)
+ await res.diff.exec(exec)
res.output = res.joinedOutput()
}
@@ -98,13 +101,13 @@ const assertFoo = async (t, arg) => {
const { output } = await mockDiff(t, {
diff,
prefixDir: {
- 'package.json': { name: 'foo', version: '1.0.0' },
+ 'package.json': { name: '@npmcli/foo', version: '1.0.0' },
'index.js': 'const version = "1.0.0"',
'a.js': 'const a = "a@1.0.0"',
'b.js': 'const b = "b@1.0.0"',
},
tarballs: {
- 'foo@0.1.0': {
+ '@npmcli/foo@0.1.0': {
'index.js': 'const version = "0.1.0"',
'a.js': 'const a = "a@0.1.0"',
'b.js': 'const b = "b@0.1.0"',
@@ -162,17 +165,17 @@ t.test('no args', async t => {
t.test('single arg', async t => {
t.test('spec using cwd package name', async t => {
- await assertFoo(t, 'foo@0.1.0')
+ await assertFoo(t, '@npmcli/foo@0.1.0')
})
t.test('unknown spec, no package.json', async t => {
await rejectDiff(t, /Needs multiple arguments to compare or run from a project dir./, {
- diff: ['foo@1.0.0'],
+ diff: ['@npmcli/foo@1.0.0'],
})
})
t.test('spec using semver range', async t => {
- await assertFoo(t, 'foo@~0.1.0')
+ await assertFoo(t, '@npmcli/foo@~0.1.0')
})
t.test('version', async t => {
@@ -429,17 +432,17 @@ t.test('single arg', async t => {
t.test('use project name in project dir', async t => {
const { output } = await mockDiff(t, {
- diff: 'foo',
+ diff: '@npmcli/foo',
prefixDir: {
- 'package.json': { name: 'foo', version: '1.0.0' },
+ 'package.json': { name: '@npmcli/foo', version: '1.0.0' },
},
tarballs: {
- 'foo@2.2.2': {},
+ '@npmcli/foo@2.2.2': {},
},
exec: [],
})
- t.match(output, 'foo')
+ t.match(output, '@npmcli/foo')
t.match(output, /-\s*"version": "2\.2\.2"/)
t.match(output, /\+\s*"version": "1\.0\.0"/)
})
@@ -448,17 +451,17 @@ t.test('single arg', async t => {
const { output } = await mockDiff(t, {
diff: '../other/other-pkg',
prefixDir: {
- 'package.json': { name: 'foo', version: '1.0.0' },
+ 'package.json': { name: '@npmcli/foo', version: '1.0.0' },
},
otherDirs: {
'other-pkg': {
- 'package.json': { name: 'foo', version: '2.0.0' },
+ 'package.json': { name: '@npmcli/foo', version: '2.0.0' },
},
},
exec: [],
})
- t.match(output, 'foo')
+ t.match(output, '@npmcli/foo')
t.match(output, /-\s*"version": "2\.0\.0"/)
t.match(output, /\+\s*"version": "1\.0\.0"/)
})
diff --git a/deps/npm/test/lib/commands/dist-tag.js b/deps/npm/test/lib/commands/dist-tag.js
index 4cc241f74582d1..1c63ce497d3fb2 100644
--- a/deps/npm/test/lib/commands/dist-tag.js
+++ b/deps/npm/test/lib/commands/dist-tag.js
@@ -77,22 +77,15 @@ const mockDist = async (t, { ...npmOpts } = {}) => {
const mock = await mockNpm(t, {
...npmOpts,
+ command: 'dist-tag',
mocks: {
'npm-registry-fetch': Object.assign(nrf, realFetch, { json: getTag }),
},
})
- const usage = await mock.npm.cmd('dist-tag').then(c => c.usage)
-
return {
...mock,
- distTag: {
- exec: (args) => mock.npm.exec('dist-tag', args),
- usage,
- completion: (remain) => mock.npm.cmd('dist-tag').then(c => c.completion({
- conf: { argv: { remain } },
- })),
- },
+ distTag: mock['dist-tag'],
fetchOpts: () => fetchOpts,
result: () => mock.joinedOutput(),
logs: () => {
@@ -365,10 +358,10 @@ t.test('remove missing pkg name', async t => {
t.test('completion', async t => {
const { distTag } = await mockDist(t)
- const match = distTag.completion(['npm', 'dist-tag'])
+ const match = distTag.completion({ conf: { argv: { remain: ['npm', 'dist-tag'] } } })
t.resolveMatch(match, ['add', 'rm', 'ls'],
'should list npm dist-tag commands for completion')
- const noMatch = distTag.completion(['npm', 'dist-tag', 'foobar'])
+ const noMatch = distTag.completion({ conf: { argv: { remain: ['npm', 'dist-tag', 'foobar'] } } })
t.resolveMatch(noMatch, [])
})
diff --git a/deps/npm/test/lib/commands/doctor.js b/deps/npm/test/lib/commands/doctor.js
index d1a88299e69ae9..1682a6cccfa483 100644
--- a/deps/npm/test/lib/commands/doctor.js
+++ b/deps/npm/test/lib/commands/doctor.js
@@ -4,7 +4,7 @@ const path = require('path')
const { load: loadMockNpm } = require('../../fixtures/mock-npm')
const tnock = require('../../fixtures/tnock.js')
-const mockGlobals = require('../../fixtures/mock-globals')
+const mockGlobals = require('@npmcli/mock-globals')
const { cleanCwd, cleanDate } = require('../../fixtures/clean-snapshot.js')
const cleanCacheSha = (str) =>
diff --git a/deps/npm/test/lib/commands/explain.js b/deps/npm/test/lib/commands/explain.js
index 79c917a1cd4527..f4d898797bcbe4 100644
--- a/deps/npm/test/lib/commands/explain.js
+++ b/deps/npm/test/lib/commands/explain.js
@@ -4,6 +4,7 @@ const mockNpm = require('../../fixtures/mock-npm.js')
const mockExplain = async (t, opts) => {
const mock = await mockNpm(t, {
+ command: 'explain',
mocks: {
// keep the snapshots pared down a bit, since this has its own tests.
'{LIB}/utils/explain-dep.js': {
@@ -16,15 +17,7 @@ const mockExplain = async (t, opts) => {
...opts,
})
- const usage = await mock.npm.cmd('explain').then(c => c.usage)
-
- return {
- ...mock,
- explain: {
- usage,
- exec: (args) => mock.npm.exec('explain', args),
- },
- }
+ return mock
}
t.test('no args throws usage', async t => {
diff --git a/deps/npm/test/lib/commands/explore.js b/deps/npm/test/lib/commands/explore.js
index 786a34a8e29882..6988dca90fbfb0 100644
--- a/deps/npm/test/lib/commands/explore.js
+++ b/deps/npm/test/lib/commands/explore.js
@@ -3,18 +3,20 @@ const mockNpm = require('../../fixtures/mock-npm')
const { cleanCwd } = require('../../fixtures/clean-snapshot')
const mockExplore = async (t, exec, {
- RPJ_ERROR = null,
+ PJ_ERROR = null,
RUN_SCRIPT_ERROR = null,
RUN_SCRIPT_EXIT_CODE = 0,
RUN_SCRIPT_SIGNAL = null,
} = {}) => {
- let RPJ_CALLED = ''
- const mockRPJ = async path => {
- if (RPJ_ERROR) {
- throw RPJ_ERROR
- }
- RPJ_CALLED = cleanCwd(path)
- return { some: 'package' }
+ let PJ_CALLED = ''
+ const mockPJ = {
+ normalize: async path => {
+ if (PJ_ERROR) {
+ throw PJ_ERROR
+ }
+ PJ_CALLED = cleanCwd(path)
+ return { content: { some: 'package' } }
+ },
}
let RUN_SCRIPT_EXEC = null
@@ -41,7 +43,7 @@ const mockExplore = async (t, exec, {
const mock = await mockNpm(t, {
mocks: {
- 'read-package-json-fast': mockRPJ,
+ '@npmcli/package-json': mockPJ,
'@npmcli/run-script': mockRunScript,
},
config: {
@@ -53,7 +55,7 @@ const mockExplore = async (t, exec, {
return {
...mock,
- RPJ_CALLED,
+ PJ_CALLED,
RUN_SCRIPT_EXEC,
output: cleanCwd(mock.joinedOutput()).trim(),
}
@@ -62,11 +64,11 @@ const mockExplore = async (t, exec, {
t.test('basic interactive', async t => {
const {
output,
- RPJ_CALLED,
+ PJ_CALLED,
RUN_SCRIPT_EXEC,
} = await mockExplore(t, ['pkg'])
- t.match(RPJ_CALLED, /\/pkg\/package.json$/)
+ t.ok(PJ_CALLED.endsWith('/pkg'))
t.strictSame(RUN_SCRIPT_EXEC, 'shell-command')
t.match(output, /Exploring \{CWD\}\/[\w-_/]+\nType 'exit' or \^D when finished/)
})
@@ -75,11 +77,11 @@ t.test('interactive tracks exit code', async t => {
t.test('code', async t => {
const {
output,
- RPJ_CALLED,
+ PJ_CALLED,
RUN_SCRIPT_EXEC,
} = await mockExplore(t, ['pkg'], { RUN_SCRIPT_EXIT_CODE: 99 })
- t.match(RPJ_CALLED, /\/pkg\/package.json$/)
+ t.ok(PJ_CALLED.endsWith('/pkg'))
t.strictSame(RUN_SCRIPT_EXEC, 'shell-command')
t.match(output, /Exploring \{CWD\}\/[\w-_/]+\nType 'exit' or \^D when finished/)
@@ -123,11 +125,11 @@ t.test('interactive tracks exit code', async t => {
t.test('basic non-interactive', async t => {
const {
output,
- RPJ_CALLED,
+ PJ_CALLED,
RUN_SCRIPT_EXEC,
} = await mockExplore(t, ['pkg', 'ls'])
- t.match(RPJ_CALLED, /\/pkg\/package.json$/)
+ t.ok(PJ_CALLED.endsWith('/pkg'))
t.strictSame(RUN_SCRIPT_EXEC, 'ls')
t.strictSame(output, '')
@@ -164,10 +166,10 @@ t.test('usage if no pkg provided', async t => {
})
t.test('pkg not installed', async t => {
- const RPJ_ERROR = new Error('plurple')
+ const PJ_ERROR = new Error('plurple')
await t.rejects(
- mockExplore(t, ['pkg', 'ls'], { RPJ_ERROR }),
+ mockExplore(t, ['pkg', 'ls'], { PJ_ERROR }),
{ message: 'plurple' }
)
})
diff --git a/deps/npm/test/lib/commands/get.js b/deps/npm/test/lib/commands/get.js
index 597cccc3ff0ba4..dec634f835172e 100644
--- a/deps/npm/test/lib/commands/get.js
+++ b/deps/npm/test/lib/commands/get.js
@@ -2,10 +2,11 @@ const t = require('tap')
const { load: loadMockNpm } = require('../../fixtures/mock-npm')
t.test('should retrieve values from config', async t => {
- const { joinedOutput, npm } = await loadMockNpm(t)
const name = 'editor'
const value = 'vigor'
- npm.config.set(name, value)
+ const { joinedOutput, npm } = await loadMockNpm(t, {
+ config: { [name]: value },
+ })
await npm.exec('get', [name])
t.equal(
joinedOutput(),
diff --git a/deps/npm/test/lib/commands/help-search.js b/deps/npm/test/lib/commands/help-search.js
index ce6e5f7cf00b01..8da725fad76924 100644
--- a/deps/npm/test/lib/commands/help-search.js
+++ b/deps/npm/test/lib/commands/help-search.js
@@ -1,6 +1,5 @@
const t = require('tap')
const { load: loadMockNpm } = require('../../fixtures/mock-npm.js')
-const chalk = require('chalk')
/* eslint-disable max-len */
const docsFixtures = {
@@ -70,6 +69,8 @@ t.test('npm help-search long output with color', async t => {
},
})
+ const chalk = await import('chalk').then(v => v.default)
+
const highlightedText = chalk.bgBlack.red('help-search')
t.equal(
output.split('\n').some(line => line.includes(highlightedText)),
diff --git a/deps/npm/test/lib/commands/help.js b/deps/npm/test/lib/commands/help.js
index e38f1bbce24d46..3fda9fb6e07fd3 100644
--- a/deps/npm/test/lib/commands/help.js
+++ b/deps/npm/test/lib/commands/help.js
@@ -61,18 +61,13 @@ const mockHelp = async (t, {
mocks: { '@npmcli/promise-spawn': mockSpawn },
otherDirs: { ...manPages.fixtures },
config,
+ command: 'help',
+ exec: execArgs,
...opts,
})
- const help = await npm.cmd('help')
- const exec = execArgs
- ? await npm.exec('help', execArgs)
- : (...a) => npm.exec('help', a)
-
return {
npm,
- help,
- exec,
manPages: manPages.pages,
getArgs: () => args,
...rest,
@@ -80,8 +75,8 @@ const mockHelp = async (t, {
}
t.test('npm help', async t => {
- const { exec, joinedOutput } = await mockHelp(t)
- await exec()
+ const { help, joinedOutput } = await mockHelp(t)
+ await help.exec()
t.match(joinedOutput(), 'npm ', 'showed npm usage')
})
@@ -216,17 +211,17 @@ t.test('npm help - works in the presence of strange man pages', async t => {
})
t.test('rejects with code', async t => {
- const { exec } = await mockHelp(t, {
+ const { help } = await mockHelp(t, {
spawnErr: Object.assign(new Error('errrrr'), { code: 'SPAWN_ERR' }),
})
- await t.rejects(exec('whoami'), /help process exited with code: SPAWN_ERR/)
+ await t.rejects(help.exec(['whoami']), /help process exited with code: SPAWN_ERR/)
})
t.test('rejects with no code', async t => {
- const { exec } = await mockHelp(t, {
+ const { help } = await mockHelp(t, {
spawnErr: new Error('errrrr'),
})
- await t.rejects(exec('whoami'), /errrrr/)
+ await t.rejects(help.exec(['whoami']), /errrrr/)
})
diff --git a/deps/npm/test/lib/commands/hook.js b/deps/npm/test/lib/commands/hook.js
index 01da9dc720dae5..e4e1214b812f3b 100644
--- a/deps/npm/test/lib/commands/hook.js
+++ b/deps/npm/test/lib/commands/hook.js
@@ -51,6 +51,7 @@ const mockHook = async (t, { hookResponse, ...npmOpts } = {}) => {
const mock = await mockNpm(t, {
...npmOpts,
+ command: 'hook',
mocks: {
libnpmhook,
...npmOpts.mocks,
@@ -60,7 +61,6 @@ const mockHook = async (t, { hookResponse, ...npmOpts } = {}) => {
return {
...mock,
now,
- hook: { exec: (args) => mock.npm.exec('hook', args) },
hookArgs: () => hookArgs,
}
}
diff --git a/deps/npm/test/lib/commands/init.js b/deps/npm/test/lib/commands/init.js
index 00caf90d0ec9b6..cb708303f405a4 100644
--- a/deps/npm/test/lib/commands/init.js
+++ b/deps/npm/test/lib/commands/init.js
@@ -6,13 +6,12 @@ const { cleanTime } = require('../../fixtures/clean-snapshot')
t.cleanSnapshot = cleanTime
-const mockNpm = async (t, { noLog, libnpmexec, initPackageJson, packageJson, ...opts } = {}) => {
+const mockNpm = async (t, { noLog, libnpmexec, initPackageJson, ...opts } = {}) => {
const res = await _mockNpm(t, {
...opts,
mocks: {
...(libnpmexec ? { libnpmexec } : {}),
...(initPackageJson ? { 'init-package-json': initPackageJson } : {}),
- ...(packageJson ? { '@npmcli/package-json': packageJson } : {}),
},
globals: {
// init-package-json prints directly to console.log
@@ -313,14 +312,7 @@ t.test('workspaces', async t => {
await t.test('fail parsing top-level package.json to set workspace', async t => {
const { npm } = await mockNpm(t, {
prefixDir: {
- 'package.json': JSON.stringify({
- name: 'top-level',
- }),
- },
- packageJson: {
- async load () {
- throw new Error('ERR')
- },
+ 'package.json': 'not json[',
},
config: { workspace: 'a', yes: true },
noLog: true,
@@ -328,8 +320,7 @@ t.test('workspaces', async t => {
await t.rejects(
npm.exec('init', []),
- /ERR/,
- 'should exit with error'
+ { code: 'EJSONPARSE' }
)
})
diff --git a/deps/npm/test/lib/commands/install.js b/deps/npm/test/lib/commands/install.js
index 1be42d6e6125f0..f40b62edde17cc 100644
--- a/deps/npm/test/lib/commands/install.js
+++ b/deps/npm/test/lib/commands/install.js
@@ -355,31 +355,32 @@ t.test('completion', async t => {
t.test('location detection and audit', async (t) => {
await t.test('audit false without package.json', async t => {
const { npm } = await loadMockNpm(t, {
+ command: 'install',
prefixDir: {
// no package.json
'readme.txt': 'just a file',
'other-dir': { a: 'a' },
},
})
- const install = await npm.cmd('install')
- t.equal(install.npm.config.get('location'), 'user')
- t.equal(install.npm.config.get('audit'), false)
+ t.equal(npm.config.get('location'), 'user')
+ t.equal(npm.config.get('audit'), false)
})
await t.test('audit true with package.json', async t => {
const { npm } = await loadMockNpm(t, {
+ command: 'install',
prefixDir: {
'package.json': '{ "name": "testpkg", "version": "1.0.0" }',
'readme.txt': 'just a file',
},
})
- const install = await npm.cmd('install')
- t.equal(install.npm.config.get('location'), 'user')
- t.equal(install.npm.config.get('audit'), true)
+ t.equal(npm.config.get('location'), 'user')
+ t.equal(npm.config.get('audit'), true)
})
await t.test('audit true without package.json when set', async t => {
const { npm } = await loadMockNpm(t, {
+ command: 'install',
prefixDir: {
// no package.json
'readme.txt': 'just a file',
@@ -389,13 +390,13 @@ t.test('location detection and audit', async (t) => {
audit: true,
},
})
- const install = await npm.cmd('install')
- t.equal(install.npm.config.get('location'), 'user')
- t.equal(install.npm.config.get('audit'), true)
+ t.equal(npm.config.get('location'), 'user')
+ t.equal(npm.config.get('audit'), true)
})
await t.test('audit true in root config without package.json', async t => {
const { npm } = await loadMockNpm(t, {
+ command: 'install',
prefixDir: {
// no package.json
'readme.txt': 'just a file',
@@ -405,13 +406,13 @@ t.test('location detection and audit', async (t) => {
otherDirs: { npmrc: 'audit=true' },
npm: ({ other }) => ({ npmRoot: other }),
})
- const install = await npm.cmd('install')
- t.equal(install.npm.config.get('location'), 'user')
- t.equal(install.npm.config.get('audit'), true)
+ t.equal(npm.config.get('location'), 'user')
+ t.equal(npm.config.get('audit'), true)
})
await t.test('test for warning when --global & --audit', async t => {
const { npm, logs } = await loadMockNpm(t, {
+ command: 'install',
prefixDir: {
// no package.json
'readme.txt': 'just a file',
@@ -422,9 +423,8 @@ t.test('location detection and audit', async (t) => {
global: true,
},
})
- const install = await npm.cmd('install')
- t.equal(install.npm.config.get('location'), 'user')
- t.equal(install.npm.config.get('audit'), true)
+ t.equal(npm.config.get('location'), 'user')
+ t.equal(npm.config.get('audit'), true)
t.equal(logs.warn[0][0], 'config')
t.equal(logs.warn[0][1], 'includes both --global and --audit, which is currently unsupported.')
})
diff --git a/deps/npm/test/lib/commands/link.js b/deps/npm/test/lib/commands/link.js
index feae75a4b9096f..65792fd141acba 100644
--- a/deps/npm/test/lib/commands/link.js
+++ b/deps/npm/test/lib/commands/link.js
@@ -10,6 +10,7 @@ t.cleanSnapshot = (str) => cleanCwd(str)
const mockLink = async (t, { globalPrefixDir, ...opts } = {}) => {
const mock = await mockNpm(t, {
...opts,
+ command: 'link',
globalPrefixDir,
mocks: {
...opts.mocks,
@@ -36,10 +37,6 @@ const mockLink = async (t, { globalPrefixDir, ...opts } = {}) => {
return {
...mock,
- link: {
- exec: (args = []) => mock.npm.exec('link', args),
- completion: (o) => mock.npm.cmd('link').then(c => c.completion(o)),
- },
printLinks,
}
}
diff --git a/deps/npm/test/lib/commands/login.js b/deps/npm/test/lib/commands/login.js
index 63666670712ef0..b42d3001ebb903 100644
--- a/deps/npm/test/lib/commands/login.js
+++ b/deps/npm/test/lib/commands/login.js
@@ -4,26 +4,47 @@ const path = require('path')
const ini = require('ini')
const { load: loadMockNpm } = require('../../fixtures/mock-npm.js')
-const mockGlobals = require('../../fixtures/mock-globals.js')
+const mockGlobals = require('@npmcli/mock-globals')
const MockRegistry = require('@npmcli/mock-registry')
const stream = require('stream')
+const mockLogin = async (t, { stdin: stdinLines, registry: registryUrl, ...options } = {}) => {
+ let stdin
+ if (stdinLines) {
+ stdin = new stream.PassThrough()
+ for (const l of stdinLines) {
+ stdin.write(l + '\n')
+ }
+ mockGlobals(t, {
+ 'process.stdin': stdin,
+ 'process.stdout': new stream.PassThrough(), // to quiet readline
+ }, { replace: true })
+ }
+ const mock = await loadMockNpm(t, {
+ ...options,
+ command: 'login',
+ })
+ const registry = new MockRegistry({
+ tap: t,
+ registry: registryUrl ?? mock.npm.config.get('registry'),
+ })
+ return {
+ registry,
+ stdin,
+ rc: () => ini.parse(fs.readFileSync(path.join(mock.home, '.npmrc'), 'utf8')),
+ ...mock,
+ }
+}
+
t.test('usage', async t => {
- const { npm } = await loadMockNpm(t)
- const login = await npm.cmd('login')
+ const { login } = await loadMockNpm(t, { command: 'login' })
t.match(login.usage, 'login', 'usage has command name in it')
})
t.test('legacy', t => {
t.test('basic login', async t => {
- const stdin = new stream.PassThrough()
- stdin.write('test-user\n')
- stdin.write('test-password\n')
- mockGlobals(t, {
- 'process.stdin': stdin,
- 'process.stdout': new stream.PassThrough(), // to quiet readline
- }, { replace: true })
- const { npm, home } = await loadMockNpm(t, {
+ const { npm, registry, login, rc } = await mockLogin(t, {
+ stdin: ['test-user', 'test-password'],
config: { 'auth-type': 'legacy' },
homeDir: {
'.npmrc': [
@@ -33,66 +54,45 @@ t.test('legacy', t => {
].join('\n'),
},
})
- const registry = new MockRegistry({
- tap: t,
- registry: npm.config.get('registry'),
- })
registry.couchlogin({
username: 'test-user',
password: 'test-password',
token: 'npm_test-token',
})
- await npm.exec('login', [])
+ await login.exec([])
t.same(npm.config.get('//registry.npmjs.org/:_authToken'), 'npm_test-token')
- const rc = ini.parse(fs.readFileSync(path.join(home, '.npmrc'), 'utf8'))
- t.same(rc, {
+ t.same(rc(), {
'//registry.npmjs.org/:_authToken': 'npm_test-token',
email: 'test-email-old@npmjs.org',
}, 'should only have token and un-nerfed old email')
})
t.test('scoped login default registry', async t => {
- const stdin = new stream.PassThrough()
- stdin.write('test-user\n')
- stdin.write('test-password\n')
- mockGlobals(t, {
- 'process.stdin': stdin,
- 'process.stdout': new stream.PassThrough(), // to quiet readline
- }, { replace: true })
- const { npm, home } = await loadMockNpm(t, {
+ const { npm, registry, login, rc } = await mockLogin(t, {
+ stdin: ['test-user', 'test-password'],
config: {
'auth-type': 'legacy',
scope: '@npmcli',
},
})
- const registry = new MockRegistry({
- tap: t,
- registry: npm.config.get('registry'),
- })
registry.couchlogin({
username: 'test-user',
password: 'test-password',
token: 'npm_test-token',
})
- await npm.exec('login', [])
+ await login.exec([])
t.same(npm.config.get('//registry.npmjs.org/:_authToken'), 'npm_test-token')
t.same(npm.config.get('@npmcli:registry'), 'https://registry.npmjs.org/')
- const rc = ini.parse(fs.readFileSync(path.join(home, '.npmrc'), 'utf8'))
- t.same(rc, {
+ t.same(rc(), {
'//registry.npmjs.org/:_authToken': 'npm_test-token',
'@npmcli:registry': 'https://registry.npmjs.org/',
}, 'should only have token and scope:registry')
})
t.test('scoped login scoped registry', async t => {
- const stdin = new stream.PassThrough()
- stdin.write('test-user\n')
- stdin.write('test-password\n')
- mockGlobals(t, {
- 'process.stdin': stdin,
- 'process.stdout': new stream.PassThrough(), // to quiet readline
- }, { replace: true })
- const { npm, home } = await loadMockNpm(t, {
+ const { npm, registry, login, rc } = await mockLogin(t, {
+ stdin: ['test-user', 'test-password'],
+ registry: 'https://diff-registry.npmjs.org',
config: {
'auth-type': 'legacy',
scope: '@npmcli',
@@ -101,20 +101,15 @@ t.test('legacy', t => {
'.npmrc': '@npmcli:registry=https://diff-registry.npmjs.org',
},
})
- const registry = new MockRegistry({
- tap: t,
- registry: 'https://diff-registry.npmjs.org',
- })
registry.couchlogin({
username: 'test-user',
password: 'test-password',
token: 'npm_test-token',
})
- await npm.exec('login', [])
+ await login.exec([])
t.same(npm.config.get('//diff-registry.npmjs.org/:_authToken'), 'npm_test-token')
t.same(npm.config.get('@npmcli:registry'), 'https://diff-registry.npmjs.org')
- const rc = ini.parse(fs.readFileSync(path.join(home, '.npmrc'), 'utf8'))
- t.same(rc, {
+ t.same(rc(), {
'@npmcli:registry': 'https://diff-registry.npmjs.org',
'//diff-registry.npmjs.org/:_authToken': 'npm_test-token',
}, 'should only have token and scope:registry')
@@ -124,51 +119,32 @@ t.test('legacy', t => {
t.test('web', t => {
t.test('basic login', async t => {
- const { npm, home } = await loadMockNpm(t, {
+ const { npm, registry, login, rc } = await mockLogin(t, {
config: { 'auth-type': 'web' },
})
- const registry = new MockRegistry({
- tap: t,
- registry: npm.config.get('registry'),
- })
registry.weblogin({ token: 'npm_test-token' })
- await npm.exec('login', [])
+ await login.exec([])
t.same(npm.config.get('//registry.npmjs.org/:_authToken'), 'npm_test-token')
- const rc = ini.parse(fs.readFileSync(path.join(home, '.npmrc'), 'utf8'))
- t.same(rc, {
+ t.same(rc(), {
'//registry.npmjs.org/:_authToken': 'npm_test-token',
})
})
t.test('server error', async t => {
- const { npm } = await loadMockNpm(t, {
+ const { registry, login } = await mockLogin(t, {
config: { 'auth-type': 'web' },
})
- const registry = new MockRegistry({
- tap: t,
- registry: npm.config.get('registry'),
- })
registry.nock.post(registry.fullPath('/-/v1/login'))
.reply(503, {})
await t.rejects(
- npm.exec('login', []),
+ login.exec([]),
{ message: /503/ }
)
})
t.test('fallback', async t => {
- const stdin = new stream.PassThrough()
- stdin.write('test-user\n')
- stdin.write('test-password\n')
- mockGlobals(t, {
- 'process.stdin': stdin,
- 'process.stdout': new stream.PassThrough(), // to quiet readline
- }, { replace: true })
- const { npm } = await loadMockNpm(t, {
+ const { npm, registry, login } = await mockLogin(t, {
+ stdin: ['test-user', 'test-password'],
config: { 'auth-type': 'web' },
})
- const registry = new MockRegistry({
- tap: t,
- registry: npm.config.get('registry'),
- })
registry.nock.post(registry.fullPath('/-/v1/login'))
.reply(404, {})
registry.couchlogin({
@@ -176,7 +152,7 @@ t.test('web', t => {
password: 'test-password',
token: 'npm_test-token',
})
- await npm.exec('login', [])
+ await login.exec([])
t.same(npm.config.get('//registry.npmjs.org/:_authToken'), 'npm_test-token')
})
t.end()
diff --git a/deps/npm/test/lib/commands/logout.js b/deps/npm/test/lib/commands/logout.js
index 0043bb4c57922a..4ff21665f30354 100644
--- a/deps/npm/test/lib/commands/logout.js
+++ b/deps/npm/test/lib/commands/logout.js
@@ -8,6 +8,7 @@ const mockLogout = async (t, { userRc = [], ...npmOpts } = {}) => {
let result = null
const mock = await mockNpm(t, {
+ command: 'logout',
mocks: {
// XXX: refactor to use mock registry
'npm-registry-fetch': Object.assign(async (url, opts) => {
@@ -22,7 +23,6 @@ const mockLogout = async (t, { userRc = [], ...npmOpts } = {}) => {
return {
...mock,
- logout: { exec: (args) => mock.npm.exec('logout', args) },
result: () => result,
// get only the message portion of the verbose log from the command
logMsg: () => mock.logs.verbose.find(l => l[0] === 'logout')[1],
diff --git a/deps/npm/test/lib/commands/org.js b/deps/npm/test/lib/commands/org.js
index d3700304328eea..511251e1bb096e 100644
--- a/deps/npm/test/lib/commands/org.js
+++ b/deps/npm/test/lib/commands/org.js
@@ -30,6 +30,7 @@ const mockOrg = async (t, { orgSize = 1, orgList = {}, ...npmOpts } = {}) => {
const mock = await mockNpm(t, {
...npmOpts,
+ command: 'org',
mocks: {
libnpmorg,
...npmOpts.mocks,
@@ -38,11 +39,6 @@ const mockOrg = async (t, { orgSize = 1, orgList = {}, ...npmOpts } = {}) => {
return {
...mock,
- org: {
- exec: (args) => mock.npm.exec('org', args),
- completion: (arg) => mock.npm.cmd('org').then(c => c.completion(arg)),
- usage: () => mock.npm.cmd('org').then(c => c.usage),
- },
setArgs: () => setArgs,
rmArgs: () => rmArgs,
lsArgs: () => lsArgs,
@@ -77,7 +73,7 @@ t.test('completion', async t => {
t.test('npm org - invalid subcommand', async t => {
const { org } = await mockOrg(t)
- await t.rejects(org.exec(['foo']), org.usage())
+ await t.rejects(org.exec(['foo']), org.usage)
})
t.test('npm org add', async t => {
diff --git a/deps/npm/test/lib/commands/outdated.js b/deps/npm/test/lib/commands/outdated.js
index 02f2067c5480eb..7becc79d62e17d 100644
--- a/deps/npm/test/lib/commands/outdated.js
+++ b/deps/npm/test/lib/commands/outdated.js
@@ -234,6 +234,7 @@ const fixtures = {
const mockNpm = async (t, { prefixDir, ...opts } = {}) => {
const res = await _mockNpm(t, {
+ command: 'outdated',
mocks: {
pacote: {
packument,
@@ -255,151 +256,150 @@ const mockNpm = async (t, { prefixDir, ...opts } = {}) => {
return {
...res,
registry,
- exec: (args) => res.npm.exec('outdated', args),
}
}
t.test('should display outdated deps', async t => {
await t.test('outdated global', async t => {
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
globalPrefixDir: fixtures.global,
config: { global: true },
})
- await exec([])
+ await outdated.exec([])
t.equal(process.exitCode, 1)
t.matchSnapshot(joinedOutput())
})
await t.test('outdated', async t => {
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
prefixDir: fixtures.local,
config: {
color: 'always',
},
})
- await exec([])
+ await outdated.exec([])
t.equal(process.exitCode, 1)
t.matchSnapshot(joinedOutput())
})
await t.test('outdated --omit=dev', async t => {
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
prefixDir: fixtures.local,
config: {
omit: ['dev'],
color: 'always',
},
})
- await exec([])
+ await outdated.exec([])
t.equal(process.exitCode, 1)
t.matchSnapshot(joinedOutput())
})
await t.test('outdated --omit=dev --omit=peer', async t => {
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
prefixDir: fixtures.local,
config: {
omit: ['dev', 'peer'],
color: 'always',
},
})
- await exec([])
+ await outdated.exec([])
t.equal(process.exitCode, 1)
t.matchSnapshot(joinedOutput())
})
await t.test('outdated --omit=prod', async t => {
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
prefixDir: fixtures.local,
config: {
omit: ['prod'],
color: 'always',
},
})
- await exec([])
+ await outdated.exec([])
t.equal(process.exitCode, 1)
t.matchSnapshot(joinedOutput())
})
await t.test('outdated --long', async t => {
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
prefixDir: fixtures.local,
config: {
long: true,
},
})
- await exec([])
+ await outdated.exec([])
t.equal(process.exitCode, 1)
t.matchSnapshot(joinedOutput())
})
await t.test('outdated --json', async t => {
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
prefixDir: fixtures.local,
config: {
json: true,
},
})
- await exec([])
+ await outdated.exec([])
t.equal(process.exitCode, 1)
t.matchSnapshot(joinedOutput())
})
await t.test('outdated --json --long', async t => {
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
prefixDir: fixtures.local,
config: {
json: true,
long: true,
},
})
- await exec([])
+ await outdated.exec([])
t.equal(process.exitCode, 1)
t.matchSnapshot(joinedOutput())
})
await t.test('outdated --parseable', async t => {
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
prefixDir: fixtures.local,
config: {
parseable: true,
},
})
- await exec([])
+ await outdated.exec([])
t.equal(process.exitCode, 1)
t.matchSnapshot(joinedOutput())
})
await t.test('outdated --parseable --long', async t => {
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
prefixDir: fixtures.local,
config: {
parseable: true,
long: true,
},
})
- await exec([])
+ await outdated.exec([])
t.equal(process.exitCode, 1)
t.matchSnapshot(joinedOutput())
})
await t.test('outdated --all', async t => {
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
prefixDir: fixtures.local,
config: {
all: true,
},
})
- await exec([])
+ await outdated.exec([])
t.equal(process.exitCode, 1)
t.matchSnapshot(joinedOutput())
})
await t.test('outdated specific dep', async t => {
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
prefixDir: fixtures.local,
})
- await exec(['cat'])
+ await outdated.exec(['cat'])
t.equal(process.exitCode, 1)
t.matchSnapshot(joinedOutput())
})
@@ -424,11 +424,11 @@ t.test('should return if no outdated deps', async t => {
},
}
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
prefixDir: testDir,
})
- await exec([])
+ await outdated.exec([])
t.equal(joinedOutput(), '', 'no logs')
})
@@ -451,11 +451,11 @@ t.test('throws if error with a dep', async t => {
},
}
- const { exec } = await mockNpm(t, {
+ const { outdated } = await mockNpm(t, {
prefixDir: testDir,
})
- await t.rejects(exec([]), 'There is an error with this package.')
+ await t.rejects(outdated.exec([]), 'There is an error with this package.')
})
t.test('should skip missing non-prod deps', async t => {
@@ -470,11 +470,11 @@ t.test('should skip missing non-prod deps', async t => {
node_modules: {},
}
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
prefixDir: testDir,
})
- await exec([])
+ await outdated.exec([])
t.equal(joinedOutput(), '', 'no logs')
})
@@ -498,10 +498,10 @@ t.test('should skip invalid pkg ranges', async t => {
},
}
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
prefixDir: testDir,
})
- await exec([])
+ await outdated.exec([])
t.equal(joinedOutput(), '', 'no logs')
})
@@ -524,21 +524,21 @@ t.test('should skip git specs', async t => {
},
}
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
prefixDir: testDir,
})
- await exec([])
+ await outdated.exec([])
t.equal(joinedOutput(), '', 'no logs')
})
t.test('workspaces', async t => {
const mockWorkspaces = async (t, { exitCode = 1, ...config } = {}) => {
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
prefixDir: fixtures.workspaces,
config,
})
- await exec([])
+ await outdated.exec([])
t.matchSnapshot(joinedOutput(), 'output')
t.equal(process.exitCode, exitCode ?? undefined)
@@ -603,10 +603,10 @@ t.test('aliases', async t => {
},
}
- const { exec, joinedOutput } = await mockNpm(t, {
+ const { outdated, joinedOutput } = await mockNpm(t, {
prefixDir: testDir,
})
- await exec([])
+ await outdated.exec([])
t.matchSnapshot(joinedOutput(), 'should display aliased outdated dep output')
t.equal(process.exitCode, 1)
diff --git a/deps/npm/test/lib/commands/owner.js b/deps/npm/test/lib/commands/owner.js
index f9399a60cdf81b..9329e8985e60c0 100644
--- a/deps/npm/test/lib/commands/owner.js
+++ b/deps/npm/test/lib/commands/owner.js
@@ -613,9 +613,10 @@ t.test('workspaces', async t => {
})
t.test('completion', async t => {
+ const mockCompletion = (t, opts) => loadMockNpm(t, { command: 'owner', ...opts })
+
t.test('basic commands', async t => {
- const { npm } = await loadMockNpm(t)
- const owner = await npm.cmd('owner')
+ const { owner } = await mockCompletion(t)
const testComp = async (argv, expect) => {
const res = await owner.completion({ conf: { argv: { remain: argv } } })
t.strictSame(res, expect, argv.join(' '))
@@ -631,10 +632,9 @@ t.test('completion', async t => {
})
t.test('completion npm owner rm', async t => {
- const { npm } = await loadMockNpm(t, {
+ const { npm, owner } = await mockCompletion(t, {
prefixDir: { 'package.json': JSON.stringify({ name: packageName }) },
})
- const owner = await npm.cmd('owner')
const registry = new MockRegistry({
tap: t,
registry: npm.config.get('registry'),
@@ -649,26 +649,23 @@ t.test('completion', async t => {
})
t.test('completion npm owner rm no cwd package', async t => {
- const { npm } = await loadMockNpm(t)
- const owner = await npm.cmd('owner')
+ const { owner } = await mockCompletion(t)
const res = await owner.completion({ conf: { argv: { remain: ['npm', 'owner', 'rm'] } } })
t.strictSame(res, [], 'should have no owners to autocomplete if not cwd package')
})
t.test('completion npm owner rm global', async t => {
- const { npm } = await loadMockNpm(t, {
+ const { owner } = await mockCompletion(t, {
config: { global: true },
})
- const owner = await npm.cmd('owner')
const res = await owner.completion({ conf: { argv: { remain: ['npm', 'owner', 'rm'] } } })
t.strictSame(res, [], 'should have no owners to autocomplete if global')
})
t.test('completion npm owner rm no owners found', async t => {
- const { npm } = await loadMockNpm(t, {
+ const { npm, owner } = await mockCompletion(t, {
prefixDir: { 'package.json': JSON.stringify({ name: packageName }) },
})
- const owner = await npm.cmd('owner')
const registry = new MockRegistry({
tap: t,
registry: npm.config.get('registry'),
diff --git a/deps/npm/test/lib/commands/pack.js b/deps/npm/test/lib/commands/pack.js
index 3e7c0225c3068c..61296cc93a53ae 100644
--- a/deps/npm/test/lib/commands/pack.js
+++ b/deps/npm/test/lib/commands/pack.js
@@ -28,8 +28,8 @@ t.test('follows pack-destination config', async t => {
}),
'tar-destination': {},
},
+ config: ({ prefix }) => ({ 'pack-destination': path.join(prefix, 'tar-destination') }),
})
- npm.config.set('pack-destination', path.join(npm.prefix, 'tar-destination'))
await npm.exec('pack', [])
const filename = 'test-package-1.0.0.tgz'
t.strictSame(outputs, [[filename]])
@@ -59,8 +59,8 @@ t.test('should log output as valid json', async t => {
version: '1.0.0',
}),
},
+ config: { json: true },
})
- npm.config.set('json', true)
await npm.exec('pack', [])
const filename = 'test-package-1.0.0.tgz'
t.matchSnapshot(outputs.map(JSON.parse), 'outputs as json')
@@ -76,8 +76,8 @@ t.test('should log scoped package output as valid json', async t => {
version: '1.0.0',
}),
},
+ config: { json: true },
})
- npm.config.set('json', true)
await npm.exec('pack', [])
const filename = 'myscope-test-package-1.0.0.tgz'
t.matchSnapshot(outputs.map(JSON.parse), 'outputs as json')
@@ -93,8 +93,8 @@ t.test('dry run', async t => {
version: '1.0.0',
}),
},
+ config: { 'dry-run': true },
})
- npm.config.set('dry-run', true)
await npm.exec('pack', [])
const filename = 'test-package-1.0.0.tgz'
t.strictSame(outputs, [[filename]])
diff --git a/deps/npm/test/lib/commands/prefix.js b/deps/npm/test/lib/commands/prefix.js
index e8295cf6a5b3c3..4fc348843fa255 100644
--- a/deps/npm/test/lib/commands/prefix.js
+++ b/deps/npm/test/lib/commands/prefix.js
@@ -2,7 +2,7 @@ const t = require('tap')
const { load: loadMockNpm } = require('../../fixtures/mock-npm')
t.test('prefix', async t => {
- const { joinedOutput, npm } = await loadMockNpm(t, { load: false })
+ const { joinedOutput, npm } = await loadMockNpm(t)
await npm.exec('prefix', [])
t.equal(
joinedOutput(),
diff --git a/deps/npm/test/lib/commands/profile.js b/deps/npm/test/lib/commands/profile.js
index 1152acfdc5c468..784523f7ccd8ad 100644
--- a/deps/npm/test/lib/commands/profile.js
+++ b/deps/npm/test/lib/commands/profile.js
@@ -24,6 +24,7 @@ const mockProfile = async (t, { npmProfile, readUserInfo, qrcode, config, ...opt
const mock = await mockNpm(t, {
...opts,
+ command: 'profile',
config: {
color: false,
...config,
@@ -37,10 +38,6 @@ const mockProfile = async (t, { npmProfile, readUserInfo, qrcode, config, ...opt
return {
...mock,
result: () => mock.joinedOutput(),
- profile: {
- exec: (args) => mock.npm.exec('profile', args),
- usage: () => mock.npm.cmd('profile').then(c => c.usage),
- },
}
}
@@ -61,7 +58,7 @@ const userProfile = {
t.test('no args', async t => {
const { profile } = await mockProfile(t)
- await t.rejects(profile.exec([]), await profile.usage())
+ await t.rejects(profile.exec([]), await profile.usage)
})
t.test('profile get no args', async t => {
@@ -1081,8 +1078,7 @@ t.test('unknown subcommand', async t => {
t.test('completion', async t => {
const testComp = async (t, { argv, expect, title } = {}) => {
- const { npm } = await mockProfile(t)
- const profile = await npm.cmd('profile')
+ const { profile } = await mockProfile(t)
t.resolveMatch(profile.completion({ conf: { argv: { remain: argv } } }), expect, title)
}
@@ -1114,8 +1110,7 @@ t.test('completion', async t => {
})
t.test('npm profile unknown subcommand autocomplete', async t => {
- const { npm } = await mockProfile(t)
- const profile = await npm.cmd('profile')
+ const { profile } = await mockProfile(t)
t.rejects(
profile.completion({ conf: { argv: { remain: ['npm', 'profile', 'asdf'] } } }),
{ message: 'asdf not recognized' },
diff --git a/deps/npm/test/lib/commands/prune.js b/deps/npm/test/lib/commands/prune.js
index 81245bcfca1671..65cfba5e5c00ab 100644
--- a/deps/npm/test/lib/commands/prune.js
+++ b/deps/npm/test/lib/commands/prune.js
@@ -4,7 +4,6 @@ const { load: loadMockNpm } = require('../../fixtures/mock-npm')
t.test('should prune using Arborist', async (t) => {
t.plan(4)
const { npm } = await loadMockNpm(t, {
- load: false,
mocks: {
'@npmcli/arborist': function (args) {
t.ok(args, 'gets options object')
diff --git a/deps/npm/test/lib/commands/publish.js b/deps/npm/test/lib/commands/publish.js
index 39696066130f9b..820760bb5704da 100644
--- a/deps/npm/test/lib/commands/publish.js
+++ b/deps/npm/test/lib/commands/publish.js
@@ -172,8 +172,7 @@ t.test('dry-run', async t => {
})
t.test('shows usage with wrong set of arguments', async t => {
- const { npm } = await loadMockNpm(t)
- const publish = await npm.cmd('publish')
+ const { publish } = await loadMockNpm(t, { command: 'publish' })
await t.rejects(publish.exec(['a', 'b', 'c']), publish.usage)
})
@@ -720,3 +719,54 @@ t.test('public access', async t => {
t.matchSnapshot(joinedOutput(), 'new package version')
t.matchSnapshot(logs.notice)
})
+
+t.test('manifest', async t => {
+ // https://github.com/npm/cli/pull/6470#issuecomment-1571234863
+
+ // snapshot test that was generated against v9.6.7 originally to ensure our
+ // own manifest does not change unexpectedly when publishing. this test
+ // asserts a bunch of keys are there that will change often and then snapshots
+ // the rest of the manifest.
+
+ const root = path.resolve(__dirname, '../../..')
+ const npmPkg = require(path.join(root, 'package.json'))
+
+ t.cleanSnapshot = (s) => s.replace(new RegExp(npmPkg.version, 'g'), '{VERSION}')
+
+ let manifest = null
+ const { npm } = await loadMockNpm(t, {
+ config: {
+ ...auth,
+ },
+ chdir: () => root,
+ mocks: {
+ libnpmpublish: {
+ publish: (m) => manifest = m,
+ },
+ },
+ })
+ await npm.exec('publish', [])
+
+ const okKeys = [
+ 'contributors',
+ 'bundleDependencies',
+ 'dependencies',
+ 'devDependencies',
+ 'templateOSS',
+ 'scripts',
+ 'tap',
+ 'readme',
+ 'gitHead',
+ 'engines',
+ 'workspaces',
+ ]
+
+ for (const k of okKeys) {
+ t.ok(manifest[k], k)
+ delete manifest[k]
+ }
+
+ manifest.man.sort()
+
+ t.matchSnapshot(manifest, 'manifest')
+})
diff --git a/deps/npm/test/lib/commands/root.js b/deps/npm/test/lib/commands/root.js
index a886b30c3ee485..506e2bc04eb84c 100644
--- a/deps/npm/test/lib/commands/root.js
+++ b/deps/npm/test/lib/commands/root.js
@@ -2,7 +2,7 @@ const t = require('tap')
const { load: loadMockNpm } = require('../../fixtures/mock-npm')
t.test('prefix', async (t) => {
- const { joinedOutput, npm } = await loadMockNpm(t, { load: false })
+ const { joinedOutput, npm } = await loadMockNpm(t)
await npm.exec('root', [])
t.equal(
joinedOutput(),
diff --git a/deps/npm/test/lib/commands/run-script.js b/deps/npm/test/lib/commands/run-script.js
index 6e2bf22adddcf8..cb54a7f51e9002 100644
--- a/deps/npm/test/lib/commands/run-script.js
+++ b/deps/npm/test/lib/commands/run-script.js
@@ -11,6 +11,7 @@ const mockRs = async (t, { windows = false, runScript, ...opts } = {}) => {
const mock = await mockNpm(t, {
...opts,
+ command: 'run-script',
mocks: {
'@npmcli/run-script': Object.assign(
async rs => {
@@ -28,18 +29,17 @@ const mockRs = async (t, { windows = false, runScript, ...opts } = {}) => {
return {
...mock,
RUN_SCRIPTS: () => RUN_SCRIPTS,
- runScript: { exec: (args) => mock.npm.exec('run-script', args) },
+ runScript: mock['run-script'],
cleanLogs: () => mock.logs.error.flat().map(v => v.toString()).map(cleanCwd),
}
}
t.test('completion', async t => {
const completion = async (t, remain, pkg, isFish = false) => {
- const { npm } = await mockRs(t,
+ const { runScript } = await mockRs(t,
pkg ? { prefixDir: { 'package.json': JSON.stringify(pkg) } } : {}
)
- const cmd = await npm.cmd('run-script')
- return cmd.completion({ conf: { argv: { remain } }, isFish })
+ return runScript.completion({ conf: { argv: { remain } }, isFish })
}
t.test('already have a script name', async t => {
diff --git a/deps/npm/test/lib/commands/stars.js b/deps/npm/test/lib/commands/stars.js
index 124d2d344d8dae..d92ced950291f5 100644
--- a/deps/npm/test/lib/commands/stars.js
+++ b/deps/npm/test/lib/commands/stars.js
@@ -6,6 +6,8 @@ const noop = () => {}
const mockStars = async (t, { npmFetch = noop, exec = true, ...opts }) => {
const mock = await mockNpm(t, {
+ command: 'stars',
+ exec,
mocks: {
'npm-registry-fetch': Object.assign(noop, realFetch, { json: npmFetch }),
'{LIB}/utils/get-identity.js': async () => 'foo',
@@ -13,16 +15,9 @@ const mockStars = async (t, { npmFetch = noop, exec = true, ...opts }) => {
...opts,
})
- const stars = { exec: (args) => mock.npm.exec('stars', args) }
-
- if (exec) {
- await stars.exec(Array.isArray(exec) ? exec : [])
- mock.result = mock.joinedOutput()
- }
-
return {
...mock,
- stars,
+ result: mock.stars.output,
logs: () => mock.logs.filter(l => l[1] === 'stars').map(l => l[2]),
}
}
@@ -45,7 +40,7 @@ t.test('no args', async t => {
}
}
- const { result } = await mockStars(t, { npmFetch, exec: true })
+ const { result } = await mockStars(t, { npmFetch })
t.matchSnapshot(
result,
@@ -122,7 +117,7 @@ t.test('unexpected error', async t => {
t.test('no pkg starred', async t => {
const npmFetch = async () => ({ rows: [] })
- const { logs } = await mockStars(t, { npmFetch, exec: true })
+ const { logs } = await mockStars(t, { npmFetch })
t.strictSame(
logs(),
diff --git a/deps/npm/test/lib/commands/team.js b/deps/npm/test/lib/commands/team.js
index a13a56d986e35e..1a5480293edc9b 100644
--- a/deps/npm/test/lib/commands/team.js
+++ b/deps/npm/test/lib/commands/team.js
@@ -6,6 +6,7 @@ t.cleanSnapshot = s => s.trim().replace(/\n+/g, '\n')
const mockTeam = async (t, { libnpmteam, ...opts } = {}) => {
const mock = await mockNpm(t, {
...opts,
+ command: 'team',
mocks: {
// XXX: this should be refactored to use the mock registry
libnpmteam: libnpmteam || {
@@ -21,7 +22,6 @@ const mockTeam = async (t, { libnpmteam, ...opts } = {}) => {
return {
...mock,
- team: { exec: (args) => mock.npm.exec('team', args) },
result: () => mock.joinedOutput(),
}
}
@@ -384,11 +384,10 @@ t.test('team rm ', async t => {
})
t.test('completion', async t => {
- const { npm } = await mockTeam(t)
- const { completion } = await npm.cmd('team')
+ const { team } = await mockTeam(t)
t.test('npm team autocomplete', async t => {
- const res = await completion({
+ const res = await team.completion({
conf: {
argv: {
remain: ['npm', 'team'],
@@ -405,7 +404,7 @@ t.test('completion', async t => {
t.test('npm team autocomplete', async t => {
for (const subcmd of ['create', 'destroy', 'add', 'rm', 'ls']) {
- const res = await completion({
+ const res = await team.completion({
conf: {
argv: {
remain: ['npm', 'team', subcmd],
@@ -421,7 +420,8 @@ t.test('completion', async t => {
})
t.test('npm team unknown subcommand autocomplete', async t => {
- t.rejects(completion({ conf: { argv: { remain: ['npm', 'team', 'missing-subcommand'] } } }),
+ t.rejects(
+ team.completion({ conf: { argv: { remain: ['npm', 'team', 'missing-subcommand'] } } }),
{ message: 'missing-subcommand not recognized' }, 'should throw a a not recognized error'
)
diff --git a/deps/npm/test/lib/commands/token.js b/deps/npm/test/lib/commands/token.js
index 1fd686a4427c9b..2bc4af4a81a3d1 100644
--- a/deps/npm/test/lib/commands/token.js
+++ b/deps/npm/test/lib/commands/token.js
@@ -14,6 +14,7 @@ const mockToken = async (t, { profile, getCredentialsByURI, readUserInfo, ...opt
const mock = await mockNpm(t, {
...opts,
+ command: 'token',
mocks,
})
@@ -22,22 +23,14 @@ const mockToken = async (t, { profile, getCredentialsByURI, readUserInfo, ...opt
mock.npm.config.getCredentialsByURI = getCredentialsByURI
}
- const token = {
- exec: (args) => mock.npm.exec('token', args),
- }
-
- return {
- ...mock,
- token,
- }
+ return mock
}
t.test('completion', async t => {
- const { npm } = await mockToken(t)
- const { completion } = await npm.cmd('token')
+ const { token } = await mockToken(t)
const testComp = (argv, expect) => {
- t.resolveMatch(completion({ conf: { argv: { remain: argv } } }), expect, argv.join(' '))
+ t.resolveMatch(token.completion({ conf: { argv: { remain: argv } } }), expect, argv.join(' '))
}
testComp(['npm', 'token'], ['list', 'revoke', 'create'])
@@ -45,7 +38,7 @@ t.test('completion', async t => {
testComp(['npm', 'token', 'revoke'], [])
testComp(['npm', 'token', 'create'], [])
- t.rejects(completion({ conf: { argv: { remain: ['npm', 'token', 'foobar'] } } }), {
+ t.rejects(token.completion({ conf: { argv: { remain: ['npm', 'token', 'foobar'] } } }), {
message: 'foobar not recognize',
})
})
diff --git a/deps/npm/test/lib/commands/uninstall.js b/deps/npm/test/lib/commands/uninstall.js
index 59a517d144d38a..ae116d44c208bf 100644
--- a/deps/npm/test/lib/commands/uninstall.js
+++ b/deps/npm/test/lib/commands/uninstall.js
@@ -188,19 +188,15 @@ t.test('no args global but no package.json', async t => {
)
})
-t.test('unknown error reading from localPrefix package.json', async t => {
+t.test('non ENOENT error reading from localPrefix package.json', async t => {
const { uninstall } = await mockNpm(t, {
config: { global: true },
- mocks: {
- 'read-package-json-fast': async () => {
- throw new Error('ERR')
- },
- },
+ prefixDir: { 'package.json': 'not[json]' },
})
await t.rejects(
uninstall([]),
- /ERR/,
- 'should throw unknown error'
+ { code: 'EJSONPARSE' },
+ 'should throw non ENOENT error'
)
})
diff --git a/deps/npm/test/lib/commands/unpublish.js b/deps/npm/test/lib/commands/unpublish.js
index 96c06bf3ffee69..6e898bd3d07e4b 100644
--- a/deps/npm/test/lib/commands/unpublish.js
+++ b/deps/npm/test/lib/commands/unpublish.js
@@ -58,7 +58,7 @@ t.test('no args --force error reading package.json', async t => {
await t.rejects(
npm.exec('unpublish', []),
- /Failed to parse json/,
+ /Invalid package.json/,
'should throw error from reading package.json'
)
})
@@ -427,13 +427,13 @@ t.test('scoped registry config', async t => {
})
t.test('completion', async t => {
- const { npm } = await loadMockNpm(t, {
+ const { npm, unpublish } = await loadMockNpm(t, {
+ command: 'unpublish',
config: {
...auth,
},
})
- const unpublish = await npm.cmd('unpublish')
const testComp =
async (t, { argv, partialWord, expect, title }) => {
const res = await unpublish.completion(
diff --git a/deps/npm/test/lib/commands/version.js b/deps/npm/test/lib/commands/version.js
index c48ff827fa28cb..8aa6c088bfc9b5 100644
--- a/deps/npm/test/lib/commands/version.js
+++ b/deps/npm/test/lib/commands/version.js
@@ -2,11 +2,12 @@ const { readFileSync, statSync } = require('fs')
const { resolve } = require('path')
const t = require('tap')
const _mockNpm = require('../../fixtures/mock-npm')
-const mockGlobals = require('../../fixtures/mock-globals.js')
+const mockGlobals = require('@npmcli/mock-globals')
const mockNpm = async (t, opts = {}) => {
const res = await _mockNpm(t, {
...opts,
+ command: 'version',
mocks: {
...opts.mocks,
'{ROOT}/package.json': { version: '1.0.0' },
@@ -14,7 +15,6 @@ const mockNpm = async (t, opts = {}) => {
})
return {
...res,
- version: { exec: (args) => res.npm.exec('version', args) },
result: () => res.outputs[0],
}
}
@@ -55,8 +55,7 @@ t.test('node@1', async t => {
})
t.test('completion', async t => {
- const { npm } = await mockNpm(t)
- const version = await npm.cmd('version')
+ const { version } = await mockNpm(t)
const testComp = async (argv, expect) => {
const res = await version.completion({ conf: { argv: { remain: argv } } })
t.strictSame(res, expect, argv.join(' '))
diff --git a/deps/npm/test/lib/commands/view.js b/deps/npm/test/lib/commands/view.js
index 51bc130df24e5e..ca07ef9eec2ff6 100644
--- a/deps/npm/test/lib/commands/view.js
+++ b/deps/npm/test/lib/commands/view.js
@@ -576,8 +576,7 @@ t.test('workspaces', async t => {
})
t.test('completion', async t => {
- const { npm } = await loadMockNpm(t)
- const view = await npm.cmd('view')
+ const { view } = await loadMockNpm(t, { command: 'view' })
const res = await view.completion({
conf: { argv: { remain: ['npm', 'view', 'green@1.0.0'] } },
})
@@ -585,8 +584,7 @@ t.test('completion', async t => {
})
t.test('no package completion', async t => {
- const { npm } = await loadMockNpm(t)
- const view = await npm.cmd('view')
+ const { view } = await loadMockNpm(t, { command: 'view' })
const res = await view.completion({ conf: { argv: { remain: ['npm', 'view'] } } })
t.notOk(res, 'there is no package completion')
t.end()
diff --git a/deps/npm/test/lib/docs.js b/deps/npm/test/lib/docs.js
index b8a1a4fc600747..5e7bfe45a93ccc 100644
--- a/deps/npm/test/lib/docs.js
+++ b/deps/npm/test/lib/docs.js
@@ -1,14 +1,14 @@
const t = require('tap')
-const { join, resolve, basename, extname, dirname } = require('path')
+const { join, resolve, basename, extname } = require('path')
const fs = require('fs/promises')
const localeCompare = require('@isaacs/string-locale-compare')('en')
const docs = require('@npmcli/docs')
const { load: loadMockNpm } = require('../fixtures/mock-npm.js')
-const mockGlobals = require('../fixtures/mock-globals.js')
const { definitions } = require('../../lib/utils/config/index.js')
const cmdList = require('../../lib/utils/cmd-list.js')
const pkg = require('../../package.json')
+const { cleanCwd } = require('../fixtures/clean-snapshot.js')
t.test('command list', async t => {
for (const [key, value] of Object.entries(cmdList)) {
@@ -30,23 +30,48 @@ t.test('config', async t => {
t.matchSnapshot(docs.config(docs.TAGS.CONFIG, {}), 'all definitions')
})
-t.test('basic usage', async t => {
- mockGlobals(t, { process: { platform: 'posix' } })
+t.test('flat options', async t => {
+ t.cleanSnapshot = (s) => cleanCwd(s)
+ .split(cleanCwd(process.execPath)).join('{NODE}')
- t.cleanSnapshot = str => str
- .split(dirname(dirname(__dirname))).join('{BASEDIR}')
- .split(pkg.version).join('{VERSION}')
+ const { npm } = await loadMockNpm(t, {
+ command: 'version',
+ exec: true,
+ globals: {
+ 'process.env': {
+ EDITOR: '{EDITOR}',
+ SHELL: '{SHELL}',
+ },
+ 'process.version': '2.2.2',
+ 'process.platform': '{PLATFORM}',
+ 'process.arch': '{ARCH}',
+ },
+ mocks: {
+ 'ci-info': { name: '{CI}' },
+ '{ROOT}/package.json': { version: '1.1.1' },
+ },
+ })
+
+ t.matchSnapshot(npm.flatOptions, 'full flat options object')
+})
+t.test('basic usage', async t => {
// snapshot basic usage without commands since all the command snapshots
// are generated in the following test
const { npm } = await loadMockNpm(t, {
mocks: {
'{LIB}/utils/cmd-list.js': { commands: [] },
},
+ config: { userconfig: '/some/config/file/.npmrc' },
+ globals: { process: { platform: 'posix' } },
})
- npm.config.set('userconfig', '/some/config/file/.npmrc')
- t.matchSnapshot(await npm.usage)
+ t.cleanSnapshot = str => str
+ .replace(npm.npmRoot, '{BASEDIR}')
+ .replace(npm.config.get('userconfig'), '{USERCONFIG}')
+ .split(pkg.version).join('{VERSION}')
+
+ t.matchSnapshot(npm.usage)
})
t.test('usage', async t => {
@@ -81,9 +106,8 @@ t.test('usage', async t => {
t.test(cmd, async t => {
let output = null
if (!bareCommands.includes(cmd)) {
- const { npm } = await loadMockNpm(t)
- const impl = await npm.cmd(cmd)
- output = impl.usage
+ const mock = await loadMockNpm(t, { command: cmd })
+ output = mock[cmd].usage
}
const usage = docs.usage(docs.TAGS.USAGE, { path: cmd })
diff --git a/deps/npm/test/lib/es6/validate-engines.js b/deps/npm/test/lib/es6/validate-engines.js
new file mode 100644
index 00000000000000..0e6bce726af966
--- /dev/null
+++ b/deps/npm/test/lib/es6/validate-engines.js
@@ -0,0 +1,34 @@
+const t = require('tap')
+const mockGlobals = require('@npmcli/mock-globals')
+const tmock = require('../../fixtures/tmock')
+
+const mockValidateEngines = (t) => {
+ const validateEngines = tmock(t, '{LIB}/es6/validate-engines.js', {
+ '{ROOT}/package.json': { version: '1.2.3', engines: { node: '>=0' } },
+ })
+ mockGlobals(t, { 'process.version': 'v4.5.6' })
+ return validateEngines(process, () => (_, r) => r)
+}
+
+t.test('validate engines', async t => {
+ t.equal(process.listenerCount('uncaughtException'), 0)
+ t.equal(process.listenerCount('unhandledRejection'), 0)
+
+ const result = mockValidateEngines(t)
+
+ t.equal(process.listenerCount('uncaughtException'), 1)
+ t.equal(process.listenerCount('unhandledRejection'), 1)
+
+ t.match(result, {
+ node: 'v4.5.6',
+ npm: 'v1.2.3',
+ engines: '>=0',
+ /* eslint-disable-next-line max-len */
+ unsupportedMessage: 'npm v1.2.3 does not support Node.js v4.5.6. This version of npm supports the following node versions: `>=0`. You can find the latest version at https://nodejs.org/.',
+ })
+
+ result.off()
+
+ t.equal(process.listenerCount('uncaughtException'), 0)
+ t.equal(process.listenerCount('unhandledRejection'), 0)
+})
diff --git a/deps/npm/test/lib/fixtures/mock-globals.js b/deps/npm/test/lib/fixtures/mock-globals.js
deleted file mode 100644
index 55418dd8e199d7..00000000000000
--- a/deps/npm/test/lib/fixtures/mock-globals.js
+++ /dev/null
@@ -1,331 +0,0 @@
-const t = require('tap')
-const mockGlobals = require('../../fixtures/mock-globals')
-
-/* eslint-disable no-console */
-const originals = {
- platform: process.platform,
- error: console.error,
- stderrOn: process.stderr.on,
- stderrWrite: process.stderr.write,
- shell: process.env.SHELL,
- home: process.env.HOME,
- argv: process.argv,
- env: process.env,
- setInterval,
-}
-
-t.test('console', async t => {
- await t.test('mocks', async (t) => {
- const errors = []
- mockGlobals(t, {
- 'console.error': (...args) => errors.push(...args),
- })
-
- console.error(1)
- console.error(2)
- console.error(3)
- t.strictSame(errors, [1, 2, 3], 'i got my errors')
- })
-
- t.equal(console.error, originals.error)
-})
-/* eslint-enable no-console */
-
-t.test('platform', async (t) => {
- t.equal(process.platform, originals.platform)
-
- await t.test('posix', async (t) => {
- mockGlobals(t, { 'process.platform': 'posix' })
- t.equal(process.platform, 'posix')
-
- await t.test('win32 --> woo', async (t) => {
- mockGlobals(t, { 'process.platform': 'win32' })
- t.equal(process.platform, 'win32')
-
- mockGlobals(t, { 'process.platform': 'woo' })
- t.equal(process.platform, 'woo')
- })
-
- t.equal(process.platform, 'posix')
- })
-
- t.equal(process.platform, originals.platform)
-})
-
-t.test('manual reset', async t => {
- let errorHandler, data
-
- const { reset } = mockGlobals(t, {
- 'process.stderr.on': (__, handler) => {
- errorHandler = handler
- reset['process.stderr.on']()
- },
- 'process.stderr.write': (chunk, callback) => {
- data = chunk
- process.nextTick(() => {
- errorHandler({ errno: 'EPIPE' })
- callback()
- })
- reset['process.stderr.write']()
- },
- })
-
- await new Promise((res, rej) => {
- process.stderr.on('error', er => er.errno === 'EPIPE' ? res() : rej(er))
- process.stderr.write('hey', res)
- })
-
- t.equal(process.stderr.on, originals.stderrOn)
- t.equal(process.stderr.write, originals.stderrWrite)
- t.equal(data, 'hey', 'handles EPIPE errors')
- t.ok(errorHandler)
-})
-
-t.test('reset called multiple times', async (t) => {
- await t.test('single reset', async t => {
- const { reset } = mockGlobals(t, { 'process.platform': 'z' })
- t.equal(process.platform, 'z')
-
- reset['process.platform']()
- t.equal(process.platform, originals.platform)
-
- reset['process.platform']()
- reset['process.platform']()
- reset['process.platform']()
- t.equal(process.platform, originals.platform)
- })
-
- t.equal(process.platform, originals.platform)
-})
-
-t.test('object mode', async t => {
- await t.test('mocks', async t => {
- const home = t.testdir()
-
- mockGlobals(t, {
- process: {
- stderr: {
- on: '1',
- },
- env: {
- HOME: home,
- },
- },
- })
-
- t.equal(process.stderr.on, '1')
- t.equal(process.env.HOME, home)
- })
-
- t.equal(process.env.HOME, originals.home)
- t.equal(process.stderr.write, originals.stderrWrite)
-})
-
-t.test('mixed object/string mode', async t => {
- await t.test('mocks', async t => {
- const home = t.testdir()
-
- mockGlobals(t, {
- 'process.env': {
- HOME: home,
- TEST: '1',
- },
- })
-
- t.equal(process.env.HOME, home)
- t.equal(process.env.TEST, '1')
- })
-
- t.equal(process.env.HOME, originals.home)
- t.equal(process.env.TEST, undefined)
-})
-
-t.test('conflicting mixed object/string mode', async t => {
- await t.test('same key', async t => {
- t.throws(
- () => mockGlobals(t, {
- process: {
- env: {
- HOME: '1',
- TEST: '1',
- NODE_ENV: '1',
- },
- stderr: {
- write: '1',
- },
- },
- 'process.env.HOME': '1',
- 'process.stderr.write': '1',
- }),
- /process.env.HOME,process.stderr.write/
- )
- })
-
- await t.test('partial overwrite with replace', async t => {
- t.throws(
- () => mockGlobals(t, {
- process: {
- env: {
- HOME: '1',
- TEST: '1',
- NODE_ENV: '1',
- },
- stderr: {
- write: '1',
- },
- },
- 'process.env.HOME': '1',
- 'process.stderr.write': '1',
- }, { replace: true }),
- /process -> process.env.HOME,process.stderr.write/
- )
- })
-})
-
-t.test('falsy values', async t => {
- await t.test('undefined deletes', async t => {
- mockGlobals(t, { 'process.platform': undefined })
- t.notOk(Object.prototype.hasOwnProperty.call(process, 'platform'))
- t.equal(process.platform, undefined)
- })
-
- await t.test('null', async t => {
- mockGlobals(t, { 'process.platform': null })
- t.ok(Object.prototype.hasOwnProperty.call(process, 'platform'))
- t.equal(process.platform, null)
- })
-
- t.equal(process.platform, originals.platform)
-})
-
-t.test('date', async t => {
- await t.test('mocks', async t => {
- mockGlobals(t, {
- 'Date.now': () => 100,
- 'Date.prototype.toISOString': () => 'DDD',
- })
- t.equal(Date.now(), 100)
- t.equal(new Date().toISOString(), 'DDD')
- })
-
- t.ok(Date.now() > 100)
- t.ok(new Date().toISOString().includes('T'))
-})
-
-t.test('argv', async t => {
- await t.test('argv', async t => {
- mockGlobals(t, { 'process.argv': ['node', 'woo'] })
- t.strictSame(process.argv, ['node', 'woo'])
- })
-
- t.strictSame(process.argv, originals.argv)
-})
-
-t.test('replace', async (t) => {
- await t.test('env', async t => {
- mockGlobals(t, { 'process.env': { HOME: '1' } }, { replace: true })
- t.strictSame(process.env, { HOME: '1' })
- t.equal(Object.keys(process.env).length, 1)
- })
-
- await t.test('setInterval', async t => {
- mockGlobals(t, { setInterval: 0 }, { replace: true })
- t.strictSame(setInterval, 0)
- })
-
- t.strictSame(setInterval, originals.setInterval)
- t.strictSame(process.env, originals.env)
-})
-
-t.test('dot key', async t => {
- const dotKey = 'this.is.a.single.key'
- mockGlobals(t, {
- [`process.env."${dotKey}"`]: 'value',
- })
- t.strictSame(process.env[dotKey], 'value')
-})
-
-t.test('multiple mocks and resets', async (t) => {
- const initial = 'a'
- const platforms = ['b', 'c', 'd', 'e', 'f', 'g']
-
- await t.test('first in, first out', async t => {
- mockGlobals(t, { 'process.platform': initial })
- t.equal(process.platform, initial)
-
- await t.test('platforms', async (t) => {
- const resets = platforms.map((platform) => {
- const { reset } = mockGlobals(t, { 'process.platform': platform })
- t.equal(process.platform, platform)
- return reset['process.platform']
- }).reverse()
-
- ;[...platforms.reverse()].forEach((platform, index) => {
- const reset = resets[index]
- const nextPlatform = index === platforms.length - 1 ? initial : platforms[index + 1]
- t.equal(process.platform, platform)
- reset()
- t.equal(process.platform, nextPlatform, 'first reset')
- reset()
- reset()
- t.equal(process.platform, nextPlatform, 'multiple resets are indempotent')
- })
- })
-
- t.equal(process.platform, initial)
- })
-
- await t.test('last in,first out', async t => {
- mockGlobals(t, { 'process.platform': initial })
- t.equal(process.platform, initial)
-
- await t.test('platforms', async (t) => {
- const resets = platforms.map((platform) => {
- const { reset } = mockGlobals(t, { 'process.platform': platform })
- t.equal(process.platform, platform)
- return reset['process.platform']
- })
-
- resets.forEach((reset, index) => {
- // Calling a reset out of order removes it from the stack
- // but does not change the descriptor so it should still be the
- // last in descriptor until there are none left
- const lastPlatform = platforms[platforms.length - 1]
- const nextPlatform = index === platforms.length - 1 ? initial : lastPlatform
- t.equal(process.platform, lastPlatform)
- reset()
- t.equal(process.platform, nextPlatform, 'multiple resets are indempotent')
- reset()
- reset()
- t.equal(process.platform, nextPlatform, 'multiple resets are indempotent')
- })
- })
-
- t.equal(process.platform, initial)
- })
-
- t.test('reset all', async (t) => {
- const { teardown } = mockGlobals(t, { 'process.platform': initial })
-
- await t.test('platforms', async (t) => {
- const resets = platforms.map((p) => {
- const { teardown: nestedTeardown, reset } = mockGlobals(t, { 'process.platform': p })
- t.equal(process.platform, p)
- return [
- reset['process.platform'],
- nestedTeardown,
- ]
- })
-
- resets.forEach(r => r[1]())
- t.equal(process.platform, initial, 'teardown goes to initial value')
-
- resets.forEach((r) => r[0]())
- t.equal(process.platform, initial, 'calling resets after teardown does nothing')
- })
-
- t.equal(process.platform, initial)
- teardown()
- t.equal(process.platform, originals.platform)
- })
-})
diff --git a/deps/npm/test/lib/load-all-commands.js b/deps/npm/test/lib/load-all-commands.js
index 1742376a36e69d..d3846434489cee 100644
--- a/deps/npm/test/lib/load-all-commands.js
+++ b/deps/npm/test/lib/load-all-commands.js
@@ -6,20 +6,30 @@ const t = require('tap')
const util = require('util')
const { load: loadMockNpm } = require('../fixtures/mock-npm.js')
const { commands } = require('../../lib/utils/cmd-list.js')
+const BaseCommand = require('../../lib/base-command.js')
const isAsyncFn = (v) => typeof v === 'function' && /^\[AsyncFunction:/.test(util.inspect(v))
t.test('load each command', async t => {
+ const counts = {
+ completion: 0,
+ ignoreImplicitWorkspace: 0,
+ workspaces: 0,
+ noParams: 0,
+ }
+
for (const cmd of commands) {
- t.test(cmd, async t => {
+ await t.test(cmd, async t => {
const { npm, outputs, cmd: impl } = await loadMockNpm(t, {
command: cmd,
config: { usage: true },
})
const ctor = impl.constructor
- if (impl.completion) {
- t.type(impl.completion, 'function', 'completion, if present, is a function')
+ t.notOk(impl.completion, 'completion is static, not on instance')
+ if (ctor.completion) {
+ t.ok(isAsyncFn(ctor.completion), 'completion is async function')
+ counts.completion++
}
// exec fn
@@ -28,7 +38,15 @@ t.test('load each command', async t => {
// workspaces
t.type(ctor.ignoreImplicitWorkspace, 'boolean', 'ctor has ignoreImplictWorkspace boolean')
+ if (ctor.ignoreImplicitWorkspace !== BaseCommand.ignoreImplicitWorkspace) {
+ counts.ignoreImplicitWorkspace++
+ }
+
t.type(ctor.workspaces, 'boolean', 'ctor has workspaces boolean')
+ if (ctor.workspaces !== BaseCommand.workspaces) {
+ counts.workspaces++
+ }
+
if (ctor.workspaces) {
t.ok(isAsyncFn(impl.execWorkspaces), 'execWorkspaces is async')
t.ok(impl.exec.length <= 1, 'execWorkspaces fn has 0 or 1 args')
@@ -38,13 +56,32 @@ t.test('load each command', async t => {
// name/desc
t.ok(impl.description, 'implementation has a description')
+ t.equal(impl.description, ctor.description, 'description is same on instance and ctor')
t.ok(impl.name, 'implementation has a name')
+ t.equal(impl.name, ctor.name, 'name is same on instance and ctor')
t.equal(cmd, impl.name, 'command list and name are the same')
+ // params are optional
+ if (impl.params) {
+ t.equal(impl.params, ctor.params, 'params is same on instance and ctor')
+ t.ok(impl.params, 'implementation has a params')
+ } else {
+ counts.noParams++
+ }
+
// usage
t.match(impl.usage, cmd, 'usage contains the command')
await npm.exec(cmd, [])
t.match(outputs[0][0], impl.usage, 'usage is what is output')
+ t.match(outputs[0][0], ctor.describeUsage, 'usage is what is output')
+ t.notOk(impl.describeUsage, 'describe usage is only static')
})
}
+
+  // make sure refactors don't move or rename these static properties since
+  // we guard against the tests for them above
+ t.ok(counts.completion > 0, 'has some completion functions')
+ t.ok(counts.ignoreImplicitWorkspace > 0, 'has some commands that change ignoreImplicitWorkspace')
+ t.ok(counts.workspaces > 0, 'has some commands that change workspaces')
+ t.ok(counts.noParams > 0, 'has some commands that do not have params')
})
diff --git a/deps/npm/test/lib/npm.js b/deps/npm/test/lib/npm.js
index 61b31be6200286..162e8c83ca4a4d 100644
--- a/deps/npm/test/lib/npm.js
+++ b/deps/npm/test/lib/npm.js
@@ -2,7 +2,7 @@ const t = require('tap')
const { resolve, dirname, join } = require('path')
const fs = require('fs')
const { load: loadMockNpm } = require('../fixtures/mock-npm.js')
-const mockGlobals = require('../fixtures/mock-globals')
+const mockGlobals = require('@npmcli/mock-globals')
const { commands } = require('../../lib/utils/cmd-list.js')
t.test('not yet loaded', async t => {
@@ -105,10 +105,6 @@ t.test('npm.load', async t => {
mockGlobals(t, { process: { platform: 'win32' } })
t.equal(npm.bin, npm.globalBin, 'bin is global bin in windows mode')
t.equal(npm.dir, npm.globalDir, 'dir is global dir in windows mode')
-
- const tmp = npm.tmp
- t.match(tmp, String, 'npm.tmp is a string')
- t.equal(tmp, npm.tmp, 'getter only generates it once')
})
await t.test('forceful loading', async t => {
@@ -127,7 +123,7 @@ t.test('npm.load', async t => {
await t.test('node is a symlink', async t => {
const node = process.platform === 'win32' ? 'node.exe' : 'node'
- const { npm, logs, outputs, prefix } = await loadMockNpm(t, {
+ const { Npm, npm, logs, outputs, prefix } = await loadMockNpm(t, {
prefixDir: {
bin: t.fixture('symlink', dirname(process.execPath)),
},
@@ -168,8 +164,8 @@ t.test('npm.load', async t => {
t.equal(npm.command, 'll', 'command set to first npm command')
t.equal(npm.flatOptions.npmCommand, 'll', 'npmCommand flatOption set')
- const ll = await npm.cmd('ll')
- t.same(outputs, [[ll.usage]], 'print usage')
+ const ll = Npm.cmd('ll')
+ t.same(outputs, [[ll.describeUsage]], 'print usage')
npm.config.set('usage', false)
outputs.length = 0
@@ -202,7 +198,6 @@ t.test('npm.load', async t => {
await t.test('--no-workspaces with --workspace', async t => {
const { npm } = await loadMockNpm(t, {
- load: false,
prefixDir: {
packages: {
a: {
@@ -554,14 +549,14 @@ t.test('output clears progress and console.logs the message', async t => {
})
t.test('aliases and typos', async t => {
- const { npm } = await loadMockNpm(t, { load: false })
- await t.rejects(npm.cmd('thisisnotacommand'), { code: 'EUNKNOWNCOMMAND' })
- await t.rejects(npm.cmd(''), { code: 'EUNKNOWNCOMMAND' })
- await t.rejects(npm.cmd('birthday'), { code: 'EUNKNOWNCOMMAND' })
- await t.resolves(npm.cmd('it'), { name: 'install-test' })
- await t.resolves(npm.cmd('installTe'), { name: 'install-test' })
- await t.resolves(npm.cmd('access'), { name: 'access' })
- await t.resolves(npm.cmd('auth'), { name: 'owner' })
+ const { Npm } = await loadMockNpm(t, { init: false })
+ t.throws(() => Npm.cmd('thisisnotacommand'), { code: 'EUNKNOWNCOMMAND' })
+ t.throws(() => Npm.cmd(''), { code: 'EUNKNOWNCOMMAND' })
+ t.throws(() => Npm.cmd('birthday'), { code: 'EUNKNOWNCOMMAND' })
+ t.match(Npm.cmd('it').name, 'install-test')
+ t.match(Npm.cmd('installTe').name, 'install-test')
+ t.match(Npm.cmd('access').name, 'access')
+ t.match(Npm.cmd('auth').name, 'owner')
})
t.test('explicit workspace rejection', async t => {
@@ -663,30 +658,28 @@ t.test('implicit workspace accept', async t => {
t.test('usage', async t => {
t.test('with browser', async t => {
- mockGlobals(t, { process: { platform: 'posix' } })
- const { npm } = await loadMockNpm(t)
- const usage = await npm.usage
+ const { npm } = await loadMockNpm(t, { globals: { process: { platform: 'posix' } } })
+ const usage = npm.usage
npm.config.set('viewer', 'browser')
- const browserUsage = await npm.usage
+ const browserUsage = npm.usage
t.notMatch(usage, '(in a browser)')
t.match(browserUsage, '(in a browser)')
})
t.test('windows always uses browser', async t => {
- mockGlobals(t, { process: { platform: 'win32' } })
- const { npm } = await loadMockNpm(t)
- const usage = await npm.usage
+ const { npm } = await loadMockNpm(t, { globals: { process: { platform: 'win32' } } })
+ const usage = npm.usage
npm.config.set('viewer', 'browser')
- const browserUsage = await npm.usage
+ const browserUsage = npm.usage
t.match(usage, '(in a browser)')
t.match(browserUsage, '(in a browser)')
})
t.test('includes commands', async t => {
const { npm } = await loadMockNpm(t)
- const usage = await npm.usage
+ const usage = npm.usage
npm.config.set('long', true)
- const longUsage = await npm.usage
+ const longUsage = npm.usage
const lastCmd = commands[commands.length - 1]
for (const cmd of commands) {
@@ -719,7 +712,7 @@ t.test('usage', async t => {
for (const width of widths) {
t.test(`column width ${width}`, async t => {
mockGlobals(t, { 'process.stdout.columns': width })
- const usage = await npm.usage
+ const usage = npm.usage
t.matchSnapshot(usage)
})
}
diff --git a/deps/npm/test/lib/utils/ansi-trim.js b/deps/npm/test/lib/utils/ansi-trim.js
index de8d3929370001..5a9e3b0c87cba7 100644
--- a/deps/npm/test/lib/utils/ansi-trim.js
+++ b/deps/npm/test/lib/utils/ansi-trim.js
@@ -1,5 +1,8 @@
const t = require('tap')
const ansiTrim = require('../../../lib/utils/ansi-trim.js')
-const chalk = require('chalk')
-t.equal(ansiTrim('foo'), 'foo', 'does nothing if no ansis')
-t.equal(ansiTrim(chalk.red('foo')), 'foo', 'strips out ansis')
+
+t.test('basic', async t => {
+ const chalk = await import('chalk').then(v => v.default)
+ t.equal(ansiTrim('foo'), 'foo', 'does nothing if no ansis')
+ t.equal(ansiTrim(chalk.red('foo')), 'foo', 'strips out ansis')
+})
diff --git a/deps/npm/test/lib/utils/audit-error.js b/deps/npm/test/lib/utils/audit-error.js
index 1cb29a0857d752..f6be56a152f710 100644
--- a/deps/npm/test/lib/utils/audit-error.js
+++ b/deps/npm/test/lib/utils/audit-error.js
@@ -10,6 +10,8 @@ const auditError = async (t, { command, error, ...config } = {}) => {
const mock = await mockNpm(t, {
command,
config,
+ exec: true,
+ prefixDir: { 'package.json': '{}', 'package-lock.json': '{}' },
})
const res = {}
@@ -32,7 +34,8 @@ t.test('no error, not audit command', async t => {
t.equal(result, false, 'no error')
t.notOk(error, 'no error')
- t.strictSame(output, '', 'no output')
+ t.match(output.trim(), /up to date/, 'install output')
+ t.match(output.trim(), /found 0 vulnerabilities/, 'install output')
t.strictSame(logs, [], 'no warnings')
})
@@ -53,7 +56,8 @@ t.test('error, not audit command', async t => {
t.equal(result, true, 'had error')
t.notOk(error, 'no error')
- t.strictSame(output, '', 'no output')
+ t.match(output.trim(), /up to date/, 'install output')
+ t.match(output.trim(), /found 0 vulnerabilities/, 'install output')
t.strictSame(logs, [], 'no warnings')
})
@@ -62,7 +66,7 @@ t.test('error, audit command, not json', async t => {
command: 'audit',
error: {
message: 'message',
- body: Buffer.from('body'),
+ body: Buffer.from('body error text'),
method: 'POST',
uri: 'https://example.com/not/a/registry',
headers: {
@@ -75,7 +79,7 @@ t.test('error, audit command, not json', async t => {
t.equal(result, undefined)
t.ok(error, 'throws error')
- t.strictSame(output, 'body', 'some output')
+ t.match(output, 'body error text', 'some output')
t.strictSame(logs, [['audit', 'message']], 'some warnings')
})
@@ -97,7 +101,7 @@ t.test('error, audit command, json', async t => {
t.equal(result, undefined)
t.ok(error, 'throws error')
- t.strictSame(output,
+ t.match(output,
'{\n' +
' "message": "message",\n' +
' "method": "POST",\n' +
diff --git a/deps/npm/test/lib/utils/completion/installed-deep.js b/deps/npm/test/lib/utils/completion/installed-deep.js
index 434d0214db4c84..0af26861ff83a5 100644
--- a/deps/npm/test/lib/utils/completion/installed-deep.js
+++ b/deps/npm/test/lib/utils/completion/installed-deep.js
@@ -1,32 +1,6 @@
-const { resolve } = require('path')
const t = require('tap')
const installedDeep = require('../../../../lib/utils/completion/installed-deep.js')
-
-let prefix
-let globalDir = 'MISSING_GLOBAL_DIR'
-const _flatOptions = {
- depth: Infinity,
- global: false,
- workspacesEnabled: true,
- Arborist: require('@npmcli/arborist'),
- get prefix () {
- return prefix
- },
-}
-const npm = {
- flatOptions: _flatOptions,
- get prefix () {
- return _flatOptions.prefix
- },
- get globalDir () {
- return globalDir
- },
- config: {
- get (key) {
- return _flatOptions[key]
- },
- },
-}
+const mockNpm = require('../../../fixtures/mock-npm')
const fixture = {
'package.json': JSON.stringify({
@@ -153,16 +127,23 @@ const globalFixture = {
},
}
-t.test('get list of package names', async t => {
- const fix = t.testdir({
- local: fixture,
- global: globalFixture,
+const mockDeep = async (t, config) => {
+ const mock = await mockNpm(t, {
+ prefixDir: fixture,
+ globalPrefixDir: globalFixture,
+ config: {
+ depth: Infinity,
+ ...config,
+ },
})
- prefix = resolve(fix, 'local')
- globalDir = resolve(fix, 'global/node_modules')
+ const res = await installedDeep(mock.npm)
- const res = await installedDeep(npm, null)
+ return res
+}
+
+t.test('get list of package names', async t => {
+ const res = await mockDeep(t)
t.same(
res,
[
@@ -179,17 +160,7 @@ t.test('get list of package names', async t => {
})
t.test('get list of package names as global', async t => {
- const fix = t.testdir({
- local: fixture,
- global: globalFixture,
- })
-
- prefix = resolve(fix, 'local')
- globalDir = resolve(fix, 'global/node_modules')
-
- _flatOptions.global = true
-
- const res = await installedDeep(npm, null)
+ const res = await mockDeep(t, { global: true })
t.same(
res,
[
@@ -199,22 +170,10 @@ t.test('get list of package names as global', async t => {
],
'should return list of global packages with no extra flags'
)
- _flatOptions.global = false
- t.end()
})
t.test('limit depth', async t => {
- const fix = t.testdir({
- local: fixture,
- global: globalFixture,
- })
-
- prefix = resolve(fix, 'local')
- globalDir = resolve(fix, 'global/node_modules')
-
- _flatOptions.depth = 0
-
- const res = await installedDeep(npm, null)
+ const res = await mockDeep(t, { depth: 0 })
t.same(
res,
[
@@ -229,23 +188,10 @@ t.test('limit depth', async t => {
],
'should print only packages up to the specified depth'
)
- _flatOptions.depth = 0
- t.end()
})
t.test('limit depth as global', async t => {
- const fix = t.testdir({
- local: fixture,
- global: globalFixture,
- })
-
- prefix = resolve(fix, 'local')
- globalDir = resolve(fix, 'global/node_modules')
-
- _flatOptions.global = true
- _flatOptions.depth = 0
-
- const res = await installedDeep(npm, null)
+ const res = await mockDeep(t, { depth: 0, global: true })
t.same(
res,
[
@@ -256,7 +202,4 @@ t.test('limit depth as global', async t => {
],
'should reorder so that packages above that level depth goes last'
)
- _flatOptions.global = false
- _flatOptions.depth = 0
- t.end()
})
diff --git a/deps/npm/test/lib/utils/completion/installed-shallow.js b/deps/npm/test/lib/utils/completion/installed-shallow.js
index 5a65b6b6bfaef7..3666803979cb38 100644
--- a/deps/npm/test/lib/utils/completion/installed-shallow.js
+++ b/deps/npm/test/lib/utils/completion/installed-shallow.js
@@ -1,13 +1,10 @@
const t = require('tap')
-const { resolve } = require('path')
const installed = require('../../../../lib/utils/completion/installed-shallow.js')
+const mockNpm = require('../../../fixtures/mock-npm')
-const flatOptions = { global: false }
-const npm = { flatOptions }
-
-t.test('global not set, include globals with -g', async t => {
- const dir = t.testdir({
- global: {
+const mockShallow = async (t, config) => {
+ const res = await mockNpm(t, {
+ globalPrefixDir: {
node_modules: {
x: {},
'@scope': {
@@ -15,7 +12,7 @@ t.test('global not set, include globals with -g', async t => {
},
},
},
- local: {
+ prefixDir: {
node_modules: {
a: {},
'@scope': {
@@ -23,10 +20,13 @@ t.test('global not set, include globals with -g', async t => {
},
},
},
+ config: { global: false, ...config },
})
- npm.globalDir = resolve(dir, 'global/node_modules')
- npm.localDir = resolve(dir, 'local/node_modules')
- flatOptions.global = false
+ return res
+}
+
+t.test('global not set, include globals with -g', async t => {
+ const { npm } = await mockShallow(t)
const opt = { conf: { argv: { remain: [] } } }
const res = await installed(npm, opt)
t.strictSame(res.sort(), [
@@ -35,64 +35,21 @@ t.test('global not set, include globals with -g', async t => {
'a',
'@scope/b',
].sort())
- t.end()
})
t.test('global set, include globals and not locals', async t => {
- const dir = t.testdir({
- global: {
- node_modules: {
- x: {},
- '@scope': {
- y: {},
- },
- },
- },
- local: {
- node_modules: {
- a: {},
- '@scope': {
- b: {},
- },
- },
- },
- })
- npm.globalDir = resolve(dir, 'global/node_modules')
- npm.localDir = resolve(dir, 'local/node_modules')
- flatOptions.global = true
+ const { npm } = await mockShallow(t, { global: true })
const opt = { conf: { argv: { remain: [] } } }
const res = await installed(npm, opt)
t.strictSame(res.sort(), [
'@scope/y',
'x',
].sort())
- t.end()
})
t.test('more than 3 items in argv, skip it', async t => {
- const dir = t.testdir({
- global: {
- node_modules: {
- x: {},
- '@scope': {
- y: {},
- },
- },
- },
- local: {
- node_modules: {
- a: {},
- '@scope': {
- b: {},
- },
- },
- },
- })
- npm.globalDir = resolve(dir, 'global/node_modules')
- npm.localDir = resolve(dir, 'local/node_modules')
- flatOptions.global = false
+ const { npm } = await mockShallow(t)
const opt = { conf: { argv: { remain: [1, 2, 3, 4, 5, 6] } } }
const res = await installed(npm, opt)
t.strictSame(res, null)
- t.end()
})
diff --git a/deps/npm/test/lib/utils/config/definitions.js b/deps/npm/test/lib/utils/config/definitions.js
index 288166039bf6fe..8775824c6c131c 100644
--- a/deps/npm/test/lib/utils/config/definitions.js
+++ b/deps/npm/test/lib/utils/config/definitions.js
@@ -1,6 +1,6 @@
const t = require('tap')
const { resolve } = require('path')
-const mockGlobals = require('../../../fixtures/mock-globals')
+const mockGlobals = require('@npmcli/mock-globals')
const tmock = require('../../../fixtures/tmock')
const pkg = require('../../../../package.json')
diff --git a/deps/npm/test/lib/utils/config/index.js b/deps/npm/test/lib/utils/config/index.js
index 90931a96d7aa24..010ec34888da49 100644
--- a/deps/npm/test/lib/utils/config/index.js
+++ b/deps/npm/test/lib/utils/config/index.js
@@ -1,7 +1,7 @@
const t = require('tap')
const config = require('../../../../lib/utils/config/index.js')
const definitions = require('../../../../lib/utils/config/definitions.js')
-const mockGlobals = require('../../../fixtures/mock-globals.js')
+const mockGlobals = require('@npmcli/mock-globals')
t.test('defaults', t => {
// just spot check a few of these to show that we got defaults assembled
diff --git a/deps/npm/test/lib/utils/did-you-mean.js b/deps/npm/test/lib/utils/did-you-mean.js
index d3cb3a24f0ae5c..d111c2f0029605 100644
--- a/deps/npm/test/lib/utils/did-you-mean.js
+++ b/deps/npm/test/lib/utils/did-you-mean.js
@@ -1,9 +1,8 @@
const t = require('tap')
-const { load: loadMockNpm } = require('../../fixtures/mock-npm.js')
const dym = require('../../../lib/utils/did-you-mean.js')
+
t.test('did-you-mean', async t => {
- const { npm } = await loadMockNpm(t)
t.test('with package.json', async t => {
const testdir = t.testdir({
'package.json': JSON.stringify({
@@ -17,27 +16,27 @@ t.test('did-you-mean', async t => {
}),
})
t.test('nistall', async t => {
- const result = await dym(npm, testdir, 'nistall')
+ const result = await dym(testdir, 'nistall')
t.match(result, 'npm install')
})
t.test('sttest', async t => {
- const result = await dym(npm, testdir, 'sttest')
+ const result = await dym(testdir, 'sttest')
t.match(result, 'npm test')
t.match(result, 'npm run posttest')
})
t.test('npz', async t => {
- const result = await dym(npm, testdir, 'npxx')
+ const result = await dym(testdir, 'npxx')
t.match(result, 'npm exec npx')
})
t.test('qwuijbo', async t => {
- const result = await dym(npm, testdir, 'qwuijbo')
+ const result = await dym(testdir, 'qwuijbo')
t.match(result, '')
})
})
t.test('with no package.json', t => {
const testdir = t.testdir({})
t.test('nistall', async t => {
- const result = await dym(npm, testdir, 'nistall')
+ const result = await dym(testdir, 'nistall')
t.match(result, 'npm install')
})
t.end()
@@ -49,7 +48,7 @@ t.test('did-you-mean', async t => {
}),
})
- const result = await dym(npm, testdir, 'nistall')
+ const result = await dym(testdir, 'nistall')
t.match(result, 'npm install')
})
})
diff --git a/deps/npm/test/lib/utils/display.js b/deps/npm/test/lib/utils/display.js
index 7a99dcb679c09c..b8f047668bfe4c 100644
--- a/deps/npm/test/lib/utils/display.js
+++ b/deps/npm/test/lib/utils/display.js
@@ -1,7 +1,7 @@
const t = require('tap')
const log = require('../../../lib/utils/log-shim')
const mockLogs = require('../../fixtures/mock-logs')
-const mockGlobals = require('../../fixtures/mock-globals')
+const mockGlobals = require('@npmcli/mock-globals')
const tmock = require('../../fixtures/tmock')
const mockDisplay = (t, mocks) => {
diff --git a/deps/npm/test/lib/utils/error-message.js b/deps/npm/test/lib/utils/error-message.js
index 37b3bc6afeddc1..1ba5865592edba 100644
--- a/deps/npm/test/lib/utils/error-message.js
+++ b/deps/npm/test/lib/utils/error-message.js
@@ -2,7 +2,7 @@ const t = require('tap')
const { resolve } = require('path')
const fs = require('fs/promises')
const { load: _loadMockNpm } = require('../../fixtures/mock-npm.js')
-const mockGlobals = require('../../fixtures/mock-globals.js')
+const mockGlobals = require('@npmcli/mock-globals')
const tmock = require('../../fixtures/tmock')
const { cleanCwd, cleanDate } = require('../../fixtures/clean-snapshot.js')
@@ -46,7 +46,9 @@ const loadMockNpm = async (t, { errorMocks, ...opts } = {}) => {
t.test('just simple messages', async t => {
const { errorMessage } = await loadMockNpm(t, {
+ prefixDir: { 'package-lock.json': '{}' },
command: 'audit',
+ exec: true,
})
const codes = [
'ENOAUDIT',
diff --git a/deps/npm/test/lib/utils/exit-handler.js b/deps/npm/test/lib/utils/exit-handler.js
index 8942d909225971..f553e1a2ea518d 100644
--- a/deps/npm/test/lib/utils/exit-handler.js
+++ b/deps/npm/test/lib/utils/exit-handler.js
@@ -6,7 +6,7 @@ const { join, resolve } = require('path')
const EventEmitter = require('events')
const { format } = require('../../../lib/utils/log-file')
const { load: loadMockNpm } = require('../../fixtures/mock-npm')
-const mockGlobals = require('../../fixtures/mock-globals')
+const mockGlobals = require('@npmcli/mock-globals')
const { cleanCwd, cleanDate } = require('../../fixtures/clean-snapshot')
const tmock = require('../../fixtures/tmock')
@@ -40,7 +40,7 @@ t.cleanSnapshot = (path) => cleanDate(cleanCwd(path))
mockGlobals(t, {
process: Object.assign(new EventEmitter(), {
// these are process properties that are needed in the running code and tests
- ...pick(process, 'execPath', 'stdout', 'stderr', 'cwd', 'chdir', 'env', 'umask'),
+ ...pick(process, 'execPath', 'stdout', 'stderr', 'stdin', 'cwd', 'chdir', 'env', 'umask'),
argv: ['/node', ...process.argv.slice(1)],
version: 'v1.0.0',
kill: () => {},
@@ -53,13 +53,11 @@ mockGlobals(t, {
}),
}, { replace: true })
-const mockExitHandler = async (t, { init, load, testdir, config, mocks, files } = {}) => {
+const mockExitHandler = async (t, { config, mocks, files, ...opts } = {}) => {
const errors = []
const { npm, logMocks, ...rest } = await loadMockNpm(t, {
- init,
- load,
- testdir,
+ ...opts,
mocks: {
'{ROOT}/package.json': {
version: '1.0.0',
@@ -592,13 +590,14 @@ t.test('exits uncleanly when only emitting exit event', async (t) => {
t.match(logs.error, [['', 'Exit handler never called!']])
t.equal(process.exitCode, 1, 'exitCode coerced to 1')
- t.end()
})
t.test('do no fancy handling for shellouts', async t => {
- const { exitHandler, npm, logs } = await mockExitHandler(t)
-
- await npm.cmd('exec')
+ const { exitHandler, logs } = await mockExitHandler(t, {
+ command: 'exec',
+ exec: true,
+ argv: ['-c', 'exit'],
+ })
const loudNoises = () =>
logs.filter(([level]) => ['warn', 'error'].includes(level))
@@ -614,7 +613,6 @@ t.test('do no fancy handling for shellouts', async t => {
t.equal(process.exitCode, 1, 'got expected exit code')
// should log some warnings and errors, because something weird happened
t.strictNotSame(loudNoises(), [], 'bring the noise')
- t.end()
})
t.test('shellout with code=0 (extra weird?)', async t => {
@@ -622,6 +620,4 @@ t.test('do no fancy handling for shellouts', async t => {
t.equal(process.exitCode, 1, 'got expected exit code')
t.strictNotSame(loudNoises(), [], 'bring the noise')
})
-
- t.end()
})
diff --git a/deps/npm/test/lib/utils/explain-dep.js b/deps/npm/test/lib/utils/explain-dep.js
index e5389fd26d7967..06174f36a7ffcc 100644
--- a/deps/npm/test/lib/utils/explain-dep.js
+++ b/deps/npm/test/lib/utils/explain-dep.js
@@ -1,269 +1,277 @@
const { resolve } = require('path')
const t = require('tap')
-const Chalk = require('chalk')
const { explainNode, printNode } = require('../../../lib/utils/explain-dep.js')
const { cleanCwd } = require('../../fixtures/clean-snapshot')
-const testdir = t.testdirName
-const color = new Chalk.Instance({ level: Chalk.level })
-const noColor = new Chalk.Instance({ level: 0 })
-
t.cleanSnapshot = (str) => cleanCwd(str)
-const cases = {
- prodDep: {
- name: 'prod-dep',
- version: '1.2.3',
- location: 'node_modules/prod-dep',
- dependents: [
- {
- type: 'prod',
- name: 'prod-dep',
- spec: '1.x',
- from: {
- location: '/path/to/project',
+const getCases = (testdir) => {
+ const cases = {
+ prodDep: {
+ name: 'prod-dep',
+ version: '1.2.3',
+ location: 'node_modules/prod-dep',
+ dependents: [
+ {
+ type: 'prod',
+ name: 'prod-dep',
+ spec: '1.x',
+ from: {
+ location: '/path/to/project',
+ },
},
- },
- ],
- },
+ ],
+ },
- deepDev: {
- name: 'deep-dev',
- version: '2.3.4',
- location: 'node_modules/deep-dev',
- dev: true,
- dependents: [
- {
- type: 'prod',
- name: 'deep-dev',
- spec: '2.x',
- from: {
- name: 'metadev',
- version: '3.4.5',
- location: 'node_modules/dev/node_modules/metadev',
- dependents: [
- {
- type: 'prod',
- name: 'metadev',
- spec: '3.x',
- from: {
- name: 'topdev',
- version: '4.5.6',
- location: 'node_modules/topdev',
- dependents: [
- {
- type: 'dev',
- name: 'topdev',
- spec: '4.x',
- from: {
- location: '/path/to/project',
+ deepDev: {
+ name: 'deep-dev',
+ version: '2.3.4',
+ location: 'node_modules/deep-dev',
+ dev: true,
+ dependents: [
+ {
+ type: 'prod',
+ name: 'deep-dev',
+ spec: '2.x',
+ from: {
+ name: 'metadev',
+ version: '3.4.5',
+ location: 'node_modules/dev/node_modules/metadev',
+ dependents: [
+ {
+ type: 'prod',
+ name: 'metadev',
+ spec: '3.x',
+ from: {
+ name: 'topdev',
+ version: '4.5.6',
+ location: 'node_modules/topdev',
+ dependents: [
+ {
+ type: 'dev',
+ name: 'topdev',
+ spec: '4.x',
+ from: {
+ location: '/path/to/project',
+ },
},
- },
- ],
+ ],
+ },
},
- },
- ],
+ ],
+ },
},
- },
- ],
- },
+ ],
+ },
- optional: {
- name: 'optdep',
- version: '1.0.0',
- location: 'node_modules/optdep',
- optional: true,
- dependents: [
- {
- type: 'optional',
- name: 'optdep',
+ optional: {
+ name: 'optdep',
+ version: '1.0.0',
+ location: 'node_modules/optdep',
+ optional: true,
+ dependents: [
+ {
+ type: 'optional',
+ name: 'optdep',
+ spec: '1.0.0',
+ from: {
+ location: '/path/to/project',
+ },
+ },
+ ],
+ },
+
+ peer: {
+ name: 'peer',
+ version: '1.0.0',
+ location: 'node_modules/peer',
+ peer: true,
+ dependents: [
+ {
+ type: 'peer',
+ name: 'peer',
+ spec: '1.0.0',
+ from: {
+ location: '/path/to/project',
+ },
+ },
+ ],
+ },
+
+ bundled: {
+ name: 'bundle-of-joy',
+ version: '1.0.0',
+ location: 'node_modules/bundle-of-joy',
+ bundled: true,
+ dependents: [
+ {
+ type: 'prod',
+ name: 'prod-dep',
+ spec: '1.x',
+ bundled: true,
+ from: {
+ location: '/path/to/project',
+ },
+ },
+ ],
+ },
+
+ extraneous: {
+ name: 'extra-neos',
+ version: '1337.420.69-lol',
+ location: 'node_modules/extra-neos',
+ dependents: [],
+ extraneous: true,
+ },
+
+ overridden: {
+ name: 'overridden-root',
+ version: '1.0.0',
+ location: 'node_modules/overridden-root',
+ overridden: true,
+ dependents: [{
+ type: 'prod',
+ name: 'overridden-dep',
spec: '1.0.0',
+ rawSpec: '^2.0.0',
+ overridden: true,
from: {
location: '/path/to/project',
},
- },
- ],
- },
+ }],
+ },
+ }
- peer: {
- name: 'peer',
+ cases.manyDeps = {
+ name: 'manydep',
version: '1.0.0',
- location: 'node_modules/peer',
- peer: true,
dependents: [
{
- type: 'peer',
- name: 'peer',
+ type: 'prod',
+ name: 'manydep',
spec: '1.0.0',
+ from: cases.prodDep,
+ },
+ {
+ type: 'optional',
+ name: 'manydep',
+ spec: '1.x',
+ from: cases.optional,
+ },
+ {
+ type: 'prod',
+ name: 'manydep',
+ spec: '1.0.x',
+ from: cases.extraneous,
+ },
+ {
+ type: 'dev',
+ name: 'manydep',
+ spec: '*',
+ from: cases.deepDev,
+ },
+ {
+ type: 'peer',
+ name: 'manydep',
+ spec: '>1.0.0-beta <1.0.1',
+ from: cases.peer,
+ },
+ {
+ type: 'prod',
+ name: 'manydep',
+ spec: '>1.0.0-beta <1.0.1',
from: {
location: '/path/to/project',
},
},
- ],
- },
-
- bundled: {
- name: 'bundle-of-joy',
- version: '1.0.0',
- location: 'node_modules/bundle-of-joy',
- bundled: true,
- dependents: [
{
type: 'prod',
- name: 'prod-dep',
- spec: '1.x',
- bundled: true,
+ name: 'manydep',
+ spec: '1',
from: {
- location: '/path/to/project',
+ name: 'a package with a pretty long name',
+ version: '1.2.3',
+ dependents: {
+ location: '/path/to/project',
+ },
},
},
- ],
- },
-
- extraneous: {
- name: 'extra-neos',
- version: '1337.420.69-lol',
- location: 'node_modules/extra-neos',
- dependents: [],
- extraneous: true,
- },
-
- overridden: {
- name: 'overridden-root',
- version: '1.0.0',
- location: 'node_modules/overridden-root',
- overridden: true,
- dependents: [{
- type: 'prod',
- name: 'overridden-dep',
- spec: '1.0.0',
- rawSpec: '^2.0.0',
- overridden: true,
- from: {
- location: '/path/to/project',
- },
- }],
- },
-}
-
-cases.manyDeps = {
- name: 'manydep',
- version: '1.0.0',
- dependents: [
- {
- type: 'prod',
- name: 'manydep',
- spec: '1.0.0',
- from: cases.prodDep,
- },
- {
- type: 'optional',
- name: 'manydep',
- spec: '1.x',
- from: cases.optional,
- },
- {
- type: 'prod',
- name: 'manydep',
- spec: '1.0.x',
- from: cases.extraneous,
- },
- {
- type: 'dev',
- name: 'manydep',
- spec: '*',
- from: cases.deepDev,
- },
- {
- type: 'peer',
- name: 'manydep',
- spec: '>1.0.0-beta <1.0.1',
- from: cases.peer,
- },
- {
- type: 'prod',
- name: 'manydep',
- spec: '>1.0.0-beta <1.0.1',
- from: {
- location: '/path/to/project',
- },
- },
- {
- type: 'prod',
- name: 'manydep',
- spec: '1',
- from: {
- name: 'a package with a pretty long name',
- version: '1.2.3',
- dependents: {
- location: '/path/to/project',
+ {
+ type: 'prod',
+ name: 'manydep',
+ spec: '1',
+ from: {
+ name: 'another package with a pretty long name',
+ version: '1.2.3',
+ dependents: {
+ location: '/path/to/project',
+ },
},
},
- },
- {
- type: 'prod',
- name: 'manydep',
- spec: '1',
- from: {
- name: 'another package with a pretty long name',
- version: '1.2.3',
- dependents: {
- location: '/path/to/project',
+ {
+ type: 'prod',
+ name: 'manydep',
+ spec: '1',
+ from: {
+ name: 'yet another a package with a pretty long name',
+ version: '1.2.3',
+ dependents: {
+ location: '/path/to/project',
+ },
},
},
- },
- {
- type: 'prod',
- name: 'manydep',
- spec: '1',
- from: {
- name: 'yet another a package with a pretty long name',
- version: '1.2.3',
- dependents: {
- location: '/path/to/project',
- },
+ ],
+ }
+
+ cases.workspaces = {
+ name: 'a',
+ version: '1.0.0',
+ location: 'a',
+ isWorkspace: true,
+ dependents: [],
+ linksIn: [
+ {
+ name: 'a',
+ version: '1.0.0',
+ location: 'node_modules/a',
+ isWorkspace: true,
+ dependents: [
+ {
+ type: 'workspace',
+ name: 'a',
+ spec: `file:${resolve(testdir, 'ws-project', 'a')}`,
+ from: { location: resolve(testdir, 'ws-project') },
+ },
+ ],
},
- },
- ],
-}
+ ],
+ }
-cases.workspaces = {
- name: 'a',
- version: '1.0.0',
- location: 'a',
- isWorkspace: true,
- dependents: [],
- linksIn: [
- {
- name: 'a',
- version: '1.0.0',
- location: 'node_modules/a',
- isWorkspace: true,
- dependents: [
- {
- type: 'workspace',
- name: 'a',
- spec: `file:${resolve(testdir, 'ws-project', 'a')}`,
- from: { location: resolve(testdir, 'ws-project') },
- },
- ],
- },
- ],
+ return cases
}
-for (const [name, expl] of Object.entries(cases)) {
- t.test(name, t => {
- t.matchSnapshot(printNode(expl, color), 'print color')
- t.matchSnapshot(printNode(expl, noColor), 'print nocolor')
- t.matchSnapshot(explainNode(expl, Infinity, color), 'explain color deep')
- t.matchSnapshot(explainNode(expl, 2, noColor), 'explain nocolor shallow')
- t.end()
- })
-}
+t.test('basic', async t => {
+ const { Chalk } = await import('chalk')
+ const color = new Chalk({ level: 3 })
+ const noColor = new Chalk({ level: 0 })
+
+ const testdir = t.testdir()
+ const cases = getCases(testdir)
+
+ for (const [name, expl] of Object.entries(getCases(testdir))) {
+ t.test(name, t => {
+ t.matchSnapshot(printNode(expl, color), 'print color')
+ t.matchSnapshot(printNode(expl, noColor), 'print nocolor')
+ t.matchSnapshot(explainNode(expl, Infinity, color), 'explain color deep')
+ t.matchSnapshot(explainNode(expl, 2, noColor), 'explain nocolor shallow')
+ t.end()
+ })
+ }
-// make sure that we show the last one if it's the only one that would
-// hit the ...
-cases.manyDeps.dependents.pop()
-t.matchSnapshot(explainNode(cases.manyDeps, 2, noColor), 'ellipses test one')
-cases.manyDeps.dependents.pop()
-t.matchSnapshot(explainNode(cases.manyDeps, 2, noColor), 'ellipses test two')
+ // make sure that we show the last one if it's the only one that would
+ // hit the ...
+ cases.manyDeps.dependents.pop()
+ t.matchSnapshot(explainNode(cases.manyDeps, 2, noColor), 'ellipses test one')
+ cases.manyDeps.dependents.pop()
+ t.matchSnapshot(explainNode(cases.manyDeps, 2, noColor), 'ellipses test two')
+})
diff --git a/deps/npm/test/lib/utils/explain-eresolve.js b/deps/npm/test/lib/utils/explain-eresolve.js
index 0f60556ef2ac98..157cc97a5a3cb6 100644
--- a/deps/npm/test/lib/utils/explain-eresolve.js
+++ b/deps/npm/test/lib/utils/explain-eresolve.js
@@ -1,29 +1,31 @@
const t = require('tap')
-const Chalk = require('chalk')
const { explain, report } = require('../../../lib/utils/explain-eresolve.js')
const cases = require('../../fixtures/eresolve-explanations.js')
-const color = new Chalk.Instance({ level: Chalk.level })
-const noColor = new Chalk.Instance({ level: 0 })
+t.test('basic', async t => {
+ const { Chalk } = await import('chalk')
+ const color = new Chalk({ level: 3 })
+ const noColor = new Chalk({ level: 0 })
-for (const [name, expl] of Object.entries(cases)) {
+ for (const [name, expl] of Object.entries(cases)) {
// no sense storing the whole contents of each object in the snapshot
// we can trust that JSON.stringify still works just fine.
- expl.toJSON = () => ({ name, json: true })
+ expl.toJSON = () => ({ name, json: true })
- t.test(name, t => {
- const colorReport = report(expl, color, noColor)
- t.matchSnapshot(colorReport.explanation, 'report with color')
- t.matchSnapshot(colorReport.file, 'report from color')
+ t.test(name, t => {
+ const colorReport = report(expl, color, noColor)
+ t.matchSnapshot(colorReport.explanation, 'report with color')
+ t.matchSnapshot(colorReport.file, 'report from color')
- const noColorReport = report(expl, noColor, noColor)
- t.matchSnapshot(noColorReport.explanation, 'report with no color')
- t.equal(noColorReport.file, colorReport.file, 'same report written for object')
+ const noColorReport = report(expl, noColor, noColor)
+ t.matchSnapshot(noColorReport.explanation, 'report with no color')
+ t.equal(noColorReport.file, colorReport.file, 'same report written for object')
- t.matchSnapshot(explain(expl, color, 2), 'explain with color, depth of 2')
- t.matchSnapshot(explain(expl, noColor, 6), 'explain with no color, depth of 6')
+ t.matchSnapshot(explain(expl, color, 2), 'explain with color, depth of 2')
+ t.matchSnapshot(explain(expl, noColor, 6), 'explain with no color, depth of 6')
- t.end()
- })
-}
+ t.end()
+ })
+ }
+})
diff --git a/deps/npm/test/lib/utils/open-url-prompt.js b/deps/npm/test/lib/utils/open-url-prompt.js
index faf2ab32587af1..c889313e162c7f 100644
--- a/deps/npm/test/lib/utils/open-url-prompt.js
+++ b/deps/npm/test/lib/utils/open-url-prompt.js
@@ -1,138 +1,120 @@
const t = require('tap')
-const mockGlobals = require('../../fixtures/mock-globals.js')
const EventEmitter = require('events')
const tmock = require('../../fixtures/tmock')
-
-const OUTPUT = []
-const output = (...args) => OUTPUT.push(args)
-const npm = {
- _config: {
- json: false,
- browser: true,
- },
- config: {
- get: k => npm._config[k],
- set: (k, v) => {
- npm._config[k] = v
+const mockNpm = require('../../fixtures/mock-npm')
+
+const mockOpenUrlPrompt = async (t, {
+ questionShouldResolve = true,
+ openUrlPromptInterrupted = false,
+ openerResult = null,
+ isTTY = true,
+ emitter = null,
+ url: openUrl = 'https://www.npmjs.com',
+ ...config
+}) => {
+ const mock = await mockNpm(t, {
+ globals: {
+ 'process.stdin.isTTY': isTTY,
+ 'process.stdout.isTTY': isTTY,
},
- },
- output,
-}
-
-let openerUrl = null
-let openerOpts = null
-let openerResult = null
-
-let questionShouldResolve = true
-let openUrlPromptInterrupted = false
+ config,
+ })
-const readline = {
- createInterface: () => ({
- question: (_q, cb) => {
- if (questionShouldResolve === true) {
- cb()
- }
+ let openerUrl = null
+ let openerOpts = null
+
+ const openUrlPrompt = tmock(t, '{LIB}/utils/open-url-prompt.js', {
+ '@npmcli/promise-spawn': {
+ open: async (url, options) => {
+ openerUrl = url
+ openerOpts = options
+ if (openerResult) {
+ throw openerResult
+ }
+ },
},
- close: () => {},
- on: (_signal, cb) => {
- if (openUrlPromptInterrupted && _signal === 'SIGINT') {
- cb()
- }
+ readline: {
+ createInterface: () => ({
+ question: (_q, cb) => {
+ if (questionShouldResolve === true) {
+ cb()
+ }
+ },
+ close: () => {},
+ on: (_signal, cb) => {
+ if (openUrlPromptInterrupted && _signal === 'SIGINT') {
+ cb()
+ }
+ },
+ }),
},
- }),
-}
+ })
-const openUrlPrompt = tmock(t, '{LIB}/utils/open-url-prompt.js', {
- '@npmcli/promise-spawn': {
- open: async (url, options) => {
- openerUrl = url
- openerOpts = options
- if (openerResult) {
- throw openerResult
- }
- },
- },
- readline,
-})
+ let error
+ const args = [mock.npm, openUrl, 'npm home', 'prompt']
+ if (emitter) {
+ mock.open = openUrlPrompt(...args, emitter)
+ } else {
+ await openUrlPrompt(...args).catch((er) => error = er)
+ }
-mockGlobals(t, {
- 'process.stdin.isTTY': true,
- 'process.stdout.isTTY': true,
-})
+ return {
+ ...mock,
+ openerUrl,
+ openerOpts,
+ OUTPUT: mock.joinedOutput(),
+ emitter,
+ error,
+ }
+}
t.test('does not open a url in non-interactive environments', async t => {
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- OUTPUT.length = 0
- })
-
- mockGlobals(t, {
- 'process.stdin.isTTY': false,
- 'process.stdout.isTTY': false,
- })
+ const { openerUrl, openerOpts } = await mockOpenUrlPrompt(t, { isTTY: false })
- await openUrlPrompt(npm, 'https://www.npmjs.com', 'npm home', 'prompt')
t.equal(openerUrl, null, 'did not open')
t.same(openerOpts, null, 'did not open')
})
t.test('opens a url', async t => {
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- OUTPUT.length = 0
- npm._config.browser = true
- })
+ const { OUTPUT, openerUrl, openerOpts } = await mockOpenUrlPrompt(t, { browser: true })
- npm._config.browser = 'browser'
- await openUrlPrompt(npm, 'https://www.npmjs.com', 'npm home', 'prompt')
t.equal(openerUrl, 'https://www.npmjs.com', 'opened the given url')
- t.same(openerOpts, { command: 'browser' }, 'passed command as null (the default)')
+ t.same(openerOpts, { command: null }, 'passed command as null (the default)')
t.matchSnapshot(OUTPUT)
})
+t.test('opens a url with browser string', async t => {
+ const { openerUrl, openerOpts } = await mockOpenUrlPrompt(t, { browser: 'firefox' })
+
+ t.equal(openerUrl, 'https://www.npmjs.com', 'opened the given url')
+ // FIXME: browser string is parsed as a boolean in config layer
+ // this is a bug that should be fixed or the config should not allow it
+ t.same(openerOpts, { command: null }, 'passed command as null (the default)')
+})
+
t.test('prints json output', async t => {
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- OUTPUT.length = 0
- npm._config.json = false
- })
+ const { OUTPUT } = await mockOpenUrlPrompt(t, { json: true })
- npm._config.json = true
- await openUrlPrompt(npm, 'https://www.npmjs.com', 'npm home', 'prompt')
t.matchSnapshot(OUTPUT)
})
t.test('returns error for non-https url', async t => {
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- OUTPUT.length = 0
+ const { error, OUTPUT, openerUrl, openerOpts } = await mockOpenUrlPrompt(t, {
+ url: 'ftp://www.npmjs.com',
})
- await t.rejects(
- openUrlPrompt(npm, 'ftp://www.npmjs.com', 'npm home', 'prompt'),
- /Invalid URL/,
- 'got the correct error'
- )
+
+ t.match(error, /Invalid URL/, 'got the correct error')
t.equal(openerUrl, null, 'did not open')
t.same(openerOpts, null, 'did not open')
- t.same(OUTPUT, [], 'printed no output')
+ t.same(OUTPUT, '', 'printed no output')
})
t.test('does not open url if canceled', async t => {
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- OUTPUT.length = 0
- questionShouldResolve = true
- })
-
- questionShouldResolve = false
const emitter = new EventEmitter()
-
- const open = openUrlPrompt(npm, 'https://www.npmjs.com', 'npm home', 'prompt', emitter)
+ const { openerUrl, openerOpts, open } = await mockOpenUrlPrompt(t, {
+ questionShouldResolve: false,
+ emitter,
+ })
emitter.emit('abort')
@@ -143,41 +125,21 @@ t.test('does not open url if canceled', async t => {
})
t.test('returns error when opener errors', async t => {
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- openerResult = null
- OUTPUT.length = 0
+ const { error, openerUrl } = await mockOpenUrlPrompt(t, {
+ openerResult: new Error('Opener failed'),
})
- openerResult = new Error('Opener failed')
-
- await t.rejects(
- openUrlPrompt(npm, 'https://www.npmjs.com', 'npm home', 'prompt'),
- /Opener failed/,
- 'got the correct error'
- )
+ t.match(error, /Opener failed/, 'got the correct error')
t.equal(openerUrl, 'https://www.npmjs.com', 'did not open')
})
t.test('throws "canceled" error on SIGINT', async t => {
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- OUTPUT.length = 0
- questionShouldResolve = true
- openUrlPromptInterrupted = false
- })
-
- questionShouldResolve = false
- openUrlPromptInterrupted = true
const emitter = new EventEmitter()
+ const { open } = await mockOpenUrlPrompt(t, {
+ questionShouldResolve: false,
+ openUrlPromptInterrupted: true,
+ emitter,
+ })
- const open = openUrlPrompt(npm, 'https://www.npmjs.com', 'npm home', 'prompt', emitter)
-
- try {
- await open
- } catch (err) {
- t.equal(err.message, 'canceled')
- }
+ await t.rejects(open, /canceled/, 'message is canceled')
})
diff --git a/deps/npm/test/lib/utils/open-url.js b/deps/npm/test/lib/utils/open-url.js
index 28a11b3609c674..0ce1b57aa5f9f7 100644
--- a/deps/npm/test/lib/utils/open-url.js
+++ b/deps/npm/test/lib/utils/open-url.js
@@ -1,197 +1,143 @@
const t = require('tap')
const tmock = require('../../fixtures/tmock')
+const mockNpm = require('../../fixtures/mock-npm')
-const OUTPUT = []
-const output = (...args) => OUTPUT.push(args)
-const npm = {
- _config: {
- json: false,
- browser: true,
- },
- config: {
- get: k => npm._config[k],
- set: (k, v) => {
- npm._config[k] = v
- },
- },
- output,
-}
+const mockOpenUrl = async (t, args, { openerResult, ...config } = {}) => {
+ let openerUrl = null
+ let openerOpts = null
+
+ const open = async (url, options) => {
+ openerUrl = url
+ openerOpts = options
+ if (openerResult) {
+ throw openerResult
+ }
+ }
+
+ const mock = await mockNpm(t, { config })
+
+ const openUrl = tmock(t, '{LIB}/utils/open-url.js', {
+ '@npmcli/promise-spawn': { open },
+ })
-let openerUrl = null
-let openerOpts = null
-let openerResult = null
+ const openWithNpm = (...a) => openUrl(mock.npm, ...a)
-const open = async (url, options) => {
- openerUrl = url
- openerOpts = options
- if (openerResult) {
- throw openerResult
+ if (args) {
+ await openWithNpm(...args)
}
-}
-const openUrl = tmock(t, '{LIB}/utils/open-url.js', {
- '@npmcli/promise-spawn': {
- open,
- },
-})
+ return {
+ ...mock,
+ openUrl: openWithNpm,
+ openerUrl: () => openerUrl,
+ openerOpts: () => openerOpts,
+ }
+}
t.test('opens a url', async t => {
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- OUTPUT.length = 0
- })
- await openUrl(npm, 'https://www.npmjs.com', 'npm home')
- t.equal(openerUrl, 'https://www.npmjs.com', 'opened the given url')
- t.same(openerOpts, { command: null }, 'passed command as null (the default)')
- t.same(OUTPUT, [], 'printed no output')
+ const { openerUrl, openerOpts, joinedOutput } = await mockOpenUrl(t,
+ ['https://www.npmjs.com', 'npm home'])
+ t.equal(openerUrl(), 'https://www.npmjs.com', 'opened the given url')
+ t.same(openerOpts(), { command: null }, 'passed command as null (the default)')
+ t.same(joinedOutput(), '', 'printed no output')
})
t.test('returns error for non-https url', async t => {
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- OUTPUT.length = 0
- })
+ const { openUrl, openerUrl, openerOpts, joinedOutput } = await mockOpenUrl(t)
await t.rejects(
- openUrl(npm, 'ftp://www.npmjs.com', 'npm home'),
+ openUrl('ftp://www.npmjs.com', 'npm home'),
/Invalid URL/,
'got the correct error'
)
- t.equal(openerUrl, null, 'did not open')
- t.same(openerOpts, null, 'did not open')
- t.same(OUTPUT, [], 'printed no output')
+ t.equal(openerUrl(), null, 'did not open')
+ t.same(openerOpts(), null, 'did not open')
+ t.same(joinedOutput(), '', 'printed no output')
})
t.test('returns error for file url', async t => {
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- OUTPUT.length = 0
- })
+ const { openUrl, openerUrl, openerOpts, joinedOutput } = await mockOpenUrl(t)
await t.rejects(
- openUrl(npm, 'file:///usr/local/bin/ls', 'npm home'),
+ openUrl('file:///usr/local/bin/ls', 'npm home'),
/Invalid URL/,
'got the correct error'
)
- t.equal(openerUrl, null, 'did not open')
- t.same(openerOpts, null, 'did not open')
- t.same(OUTPUT, [], 'printed no output')
+ t.equal(openerUrl(), null, 'did not open')
+ t.same(openerOpts(), null, 'did not open')
+ t.same(joinedOutput(), '', 'printed no output')
})
t.test('file url allowed if explicitly asked for', async t => {
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- OUTPUT.length = 0
- })
- await openUrl(npm, 'file:///man/page/npm-install', 'npm home', true)
- t.equal(openerUrl, 'file:///man/page/npm-install', 'opened the given url')
- t.same(openerOpts, { command: null }, 'passed command as null (the default)')
- t.same(OUTPUT, [], 'printed no output')
+ const { openerUrl, openerOpts, joinedOutput } = await mockOpenUrl(t,
+ ['file:///man/page/npm-install', 'npm home', true])
+ t.equal(openerUrl(), 'file:///man/page/npm-install', 'opened the given url')
+ t.same(openerOpts(), { command: null }, 'passed command as null (the default)')
+ t.same(joinedOutput(), '', 'printed no output')
})
t.test('returns error for non-parseable url', async t => {
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- OUTPUT.length = 0
- })
+ const { openUrl, openerUrl, openerOpts, joinedOutput } = await mockOpenUrl(t)
await t.rejects(
- openUrl(npm, 'git+ssh://user@host:repo.git', 'npm home'),
+ openUrl('git+ssh://user@host:repo.git', 'npm home'),
/Invalid URL/,
'got the correct error'
)
- t.equal(openerUrl, null, 'did not open')
- t.same(openerOpts, null, 'did not open')
- t.same(OUTPUT, [], 'printed no output')
+ t.equal(openerUrl(), null, 'did not open')
+ t.same(openerOpts(), null, 'did not open')
+ t.same(joinedOutput(), '', 'printed no output')
})
t.test('encodes non-URL-safe characters in url provided', async t => {
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- OUTPUT.length = 0
- })
- await openUrl(npm, 'https://www.npmjs.com/|cat', 'npm home')
- t.equal(openerUrl, 'https://www.npmjs.com/%7Ccat', 'opened the encoded url')
- t.same(openerOpts, { command: null }, 'passed command as null (the default)')
- t.same(OUTPUT, [], 'printed no output')
+ const { openerUrl, openerOpts, joinedOutput } = await mockOpenUrl(t,
+ ['https://www.npmjs.com/|cat', 'npm home'])
+ t.equal(openerUrl(), 'https://www.npmjs.com/%7Ccat', 'opened the encoded url')
+ t.same(openerOpts(), { command: null }, 'passed command as null (the default)')
+ t.same(joinedOutput(), '', 'printed no output')
})
t.test('opens a url with the given browser', async t => {
- npm.config.set('browser', 'chrome')
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- OUTPUT.length = 0
- npm.config.set('browser', true)
- })
- await openUrl(npm, 'https://www.npmjs.com', 'npm home')
- t.equal(openerUrl, 'https://www.npmjs.com', 'opened the given url')
- t.same(openerOpts, { command: 'chrome' }, 'passed the given browser as command')
- t.same(OUTPUT, [], 'printed no output')
+ const { openerUrl, openerOpts, joinedOutput } = await mockOpenUrl(t,
+ ['https://www.npmjs.com', 'npm home'], { browser: 'chrome' })
+ t.equal(openerUrl(), 'https://www.npmjs.com', 'opened the given url')
+ // FIXME: browser string is parsed as a boolean in config layer
+ // this is a bug that should be fixed or the config should not allow it
+ t.same(openerOpts(), { command: null }, 'passed the given browser as command')
+ t.same(joinedOutput(), '', 'printed no output')
})
t.test('prints where to go when browser is disabled', async t => {
- npm.config.set('browser', false)
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- OUTPUT.length = 0
- npm.config.set('browser', true)
- })
- await openUrl(npm, 'https://www.npmjs.com', 'npm home')
- t.equal(openerUrl, null, 'did not open')
- t.same(openerOpts, null, 'did not open')
- t.equal(OUTPUT.length, 1, 'got one logged message')
- t.equal(OUTPUT[0].length, 1, 'logged message had one value')
- t.matchSnapshot(OUTPUT[0][0], 'printed expected message')
+ const { openerUrl, openerOpts, joinedOutput } = await mockOpenUrl(t,
+ ['https://www.npmjs.com', 'npm home'], { browser: false })
+ t.equal(openerUrl(), null, 'did not open')
+ t.same(openerOpts(), null, 'did not open')
+ t.matchSnapshot(joinedOutput(), 'printed expected message')
})
t.test('prints where to go when browser is disabled and json is enabled', async t => {
- npm.config.set('browser', false)
- npm.config.set('json', true)
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- OUTPUT.length = 0
- npm.config.set('browser', true)
- npm.config.set('json', false)
- })
- await openUrl(npm, 'https://www.npmjs.com', 'npm home')
- t.equal(openerUrl, null, 'did not open')
- t.same(openerOpts, null, 'did not open')
- t.equal(OUTPUT.length, 1, 'got one logged message')
- t.equal(OUTPUT[0].length, 1, 'logged message had one value')
- t.matchSnapshot(OUTPUT[0][0], 'printed expected message')
+ const { openerUrl, openerOpts, joinedOutput } = await mockOpenUrl(t,
+ ['https://www.npmjs.com', 'npm home'], { browser: false, json: true })
+ t.equal(openerUrl(), null, 'did not open')
+ t.same(openerOpts(), null, 'did not open')
+ t.matchSnapshot(joinedOutput(), 'printed expected message')
})
t.test('prints where to go when given browser does not exist', async t => {
- npm.config.set('browser', 'firefox')
- openerResult = Object.assign(new Error('failed'), { code: 'ENOENT' })
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- OUTPUT.length = 0
- npm.config.set('browser', true)
- })
- await openUrl(npm, 'https://www.npmjs.com', 'npm home')
- t.equal(openerUrl, 'https://www.npmjs.com', 'tried to open the correct url')
- t.same(openerOpts, { command: 'firefox' }, 'tried to use the correct browser')
- t.equal(OUTPUT.length, 1, 'got one logged message')
- t.equal(OUTPUT[0].length, 1, 'logged message had one value')
- t.matchSnapshot(OUTPUT[0][0], 'printed expected message')
+ const { openerUrl, openerOpts, joinedOutput } = await mockOpenUrl(t,
+ ['https://www.npmjs.com', 'npm home'],
+ {
+ openerResult: Object.assign(new Error('failed'), { code: 'ENOENT' }),
+ }
+ )
+
+ t.equal(openerUrl(), 'https://www.npmjs.com', 'tried to open the correct url')
+ t.same(openerOpts(), { command: null }, 'tried to use the correct browser')
+ t.matchSnapshot(joinedOutput(), 'printed expected message')
})
t.test('handles unknown opener error', async t => {
- npm.config.set('browser', 'firefox')
- openerResult = Object.assign(new Error('failed'), { code: 'ENOBRIAN' })
- t.teardown(() => {
- openerUrl = null
- openerOpts = null
- OUTPUT.length = 0
- npm.config.set('browser', true)
+ const { openUrl } = await mockOpenUrl(t, null, {
+ browser: 'firefox',
+ openerResult: Object.assign(new Error('failed'), { code: 'ENOBRIAN' }),
})
- t.rejects(openUrl(npm, 'https://www.npmjs.com', 'npm home'), 'failed', 'got the correct error')
+
+ await t.rejects(openUrl('https://www.npmjs.com', 'npm home'), 'failed', 'got the correct error')
})
diff --git a/deps/npm/test/lib/utils/reify-finish.js b/deps/npm/test/lib/utils/reify-finish.js
index ee112203a24bc8..a2ca6e43679627 100644
--- a/deps/npm/test/lib/utils/reify-finish.js
+++ b/deps/npm/test/lib/utils/reify-finish.js
@@ -1,81 +1,95 @@
const t = require('tap')
+const fs = require('fs/promises')
+const { join } = require('path')
const { cleanNewlines } = require('../../fixtures/clean-snapshot')
const tmock = require('../../fixtures/tmock')
+const mockNpm = require('../../fixtures/mock-npm')
-const npm = {
- config: {
- data: {
- get: () => builtinConfMock,
- },
- },
+// windowwwwwwssss!!!!!
+const readRc = async (dir) => {
+ const res = await fs.readFile(join(dir, 'npmrc'), 'utf8').catch(() => '')
+ return cleanNewlines(res).trim()
}
-const builtinConfMock = {
- loadError: new Error('no builtin config'),
- raw: { hasBuiltinConfig: true, x: 'y', nested: { foo: 'bar' } },
-}
+const mockReififyFinish = async (t, { actualTree = {}, otherDirs = {}, ...config }) => {
+ const mock = await mockNpm(t, {
+ npm: ({ other }) => ({
+ npmRoot: other,
+ }),
+ otherDirs: {
+ npmrc: `key=value`,
+ ...otherDirs,
+ },
+ config,
+ })
-const reifyOutput = () => {}
+ const reifyFinish = tmock(t, '{LIB}/utils/reify-finish.js', {
+ '{LIB}/utils/reify-output.js': () => {},
+ })
-let expectWrite = false
-const realFs = require('fs')
-const fs = {
- ...realFs,
- promises: realFs.promises && {
- ...realFs.promises,
- writeFile: async (path, data) => {
- if (!expectWrite) {
- throw new Error('did not expect to write builtin config file')
- }
- return realFs.promises.writeFile(path, data)
- },
- },
-}
+ await reifyFinish(mock.npm, {
+ options: { global: mock.npm.global },
+ actualTree: typeof actualTree === 'function' ? actualTree(mock) : actualTree,
+ })
-const reifyFinish = tmock(t, '{LIB}/utils/reify-finish.js', {
- fs,
- '{LIB}/utils/reify-output.js': reifyOutput,
-})
+ const builtinRc = {
+ raw: await readRc(mock.other),
+ data: Object.fromEntries(Object.entries(mock.npm.config.data.get('builtin').data)),
+ }
+
+ return {
+ builtinRc,
+ ...mock,
+ }
+}
-t.test('should not write if not global', async t => {
- expectWrite = false
- await reifyFinish(npm, {
- options: { global: false },
- actualTree: {},
+t.test('ok by default', async t => {
+ const mock = await mockReififyFinish(t, {
+ global: false,
})
+ t.same(mock.builtinRc.raw, 'key=value')
+ t.strictSame(mock.builtinRc.data, { key: 'value' })
})
t.test('should not write if no global npm module', async t => {
- expectWrite = false
- await reifyFinish(npm, {
- options: { global: true },
+ const mock = await mockReififyFinish(t, {
+ global: true,
actualTree: {
inventory: new Map(),
},
})
+ t.same(mock.builtinRc.raw, 'key=value')
+ t.strictSame(mock.builtinRc.data, { key: 'value' })
})
t.test('should not write if builtin conf had load error', async t => {
- expectWrite = false
- await reifyFinish(npm, {
- options: { global: true },
+ const mock = await mockReififyFinish(t, {
+ global: true,
+ otherDirs: {
+ npmrc: {},
+ },
actualTree: {
inventory: new Map([['node_modules/npm', {}]]),
},
})
+ t.same(mock.builtinRc.raw, '')
+ t.strictSame(mock.builtinRc.data, {})
})
t.test('should write if everything above passes', async t => {
- expectWrite = true
- delete builtinConfMock.loadError
- const path = t.testdir()
- await reifyFinish(npm, {
- options: { global: true },
- actualTree: {
- inventory: new Map([['node_modules/npm', { path }]]),
+ const mock = await mockReififyFinish(t, {
+ global: true,
+ otherDirs: {
+ 'new-npm': {},
},
+ actualTree: ({ other }) => ({
+ inventory: new Map([['node_modules/npm', { path: join(other, 'new-npm') }]]),
+ }),
})
- // windowwwwwwssss!!!!!
- const data = cleanNewlines(fs.readFileSync(`${path}/npmrc`, 'utf8'))
- t.matchSnapshot(data, 'written config')
+
+ t.same(mock.builtinRc.raw, 'key=value')
+ t.strictSame(mock.builtinRc.data, { key: 'value' })
+
+ const newFile = await readRc(join(mock.other, 'new-npm'))
+ t.equal(mock.builtinRc.raw, newFile)
})
diff --git a/deps/npm/test/lib/utils/reify-output.js b/deps/npm/test/lib/utils/reify-output.js
index 5d1d5be47efa30..1c6215ab33bef0 100644
--- a/deps/npm/test/lib/utils/reify-output.js
+++ b/deps/npm/test/lib/utils/reify-output.js
@@ -8,6 +8,7 @@ const mockReify = async (t, reify, { command, ...config } = {}) => {
const mock = await mockNpm(t, {
command,
config,
+ setCmd: true,
})
reifyOutput(mock.npm, reify)
diff --git a/deps/npm/test/lib/utils/update-notifier.js b/deps/npm/test/lib/utils/update-notifier.js
index 9c12433a2d1177..cc5348a440e0a7 100644
--- a/deps/npm/test/lib/utils/update-notifier.js
+++ b/deps/npm/test/lib/utils/update-notifier.js
@@ -19,7 +19,8 @@ const runUpdateNotifier = async (t, {
PACOTE_ERROR,
STAT_MTIME = 0,
mocks: _mocks = {},
- command = 'view',
+ command = 'help',
+ prefixDir,
version = CURRENT_VERSION,
argv = [],
...config
@@ -76,6 +77,8 @@ const runUpdateNotifier = async (t, {
command,
mocks,
config,
+ exec: true,
+ prefixDir,
argv,
})
const updateNotifier = tmock(t, '{LIB}/utils/update-notifier.js', mocks)
@@ -106,6 +109,7 @@ t.test('situations in which we do not notify', t => {
t.test('do not suggest update if already updating', async t => {
const { result, MANIFEST_REQUEST } = await runUpdateNotifier(t, {
command: 'install',
+ prefixDir: { 'package.json': `{"name":"${t.testName}"}` },
argv: ['npm'],
global: true,
})
@@ -116,6 +120,7 @@ t.test('situations in which we do not notify', t => {
t.test('do not suggest update if already updating with spec', async t => {
const { result, MANIFEST_REQUEST } = await runUpdateNotifier(t, {
command: 'install',
+ prefixDir: { 'package.json': `{"name":"${t.testName}"}` },
argv: ['npm@latest'],
global: true,
})
diff --git a/deps/simdutf/simdutf.cpp b/deps/simdutf/simdutf.cpp
index 15107075d51b12..712bf0cf85833a 100644
--- a/deps/simdutf/simdutf.cpp
+++ b/deps/simdutf/simdutf.cpp
@@ -1,8 +1,8 @@
-/* auto-generated on 2023-05-22 21:46:48 -0400. Do not edit! */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf.cpp
+/* auto-generated on 2023-06-05 08:58:28 -0400. Do not edit! */
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf.cpp
/* begin file src/simdutf.cpp */
#include "simdutf.h"
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=implementation.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=implementation.cpp
/* begin file src/implementation.cpp */
#include
#include
@@ -26,7 +26,7 @@ std::string toBinaryString(T b) {
// Implementations
// The best choice should always come first!
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/arm64.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/arm64.h
/* begin file src/simdutf/arm64.h */
#ifndef SIMDUTF_ARM64_H
#define SIMDUTF_ARM64_H
@@ -53,7 +53,7 @@ namespace arm64 {
} // namespace arm64
} // namespace simdutf
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/arm64/implementation.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/arm64/implementation.h
/* begin file src/simdutf/arm64/implementation.h */
#ifndef SIMDUTF_ARM64_IMPLEMENTATION_H
#define SIMDUTF_ARM64_IMPLEMENTATION_H
@@ -130,14 +130,14 @@ class implementation final : public simdutf::implementation {
#endif // SIMDUTF_ARM64_IMPLEMENTATION_H
/* end file src/simdutf/arm64/implementation.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/arm64/begin.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/arm64/begin.h
/* begin file src/simdutf/arm64/begin.h */
// redefining SIMDUTF_IMPLEMENTATION to "arm64"
// #define SIMDUTF_IMPLEMENTATION arm64
/* end file src/simdutf/arm64/begin.h */
// Declarations
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/arm64/intrinsics.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/arm64/intrinsics.h
/* begin file src/simdutf/arm64/intrinsics.h */
#ifndef SIMDUTF_ARM64_INTRINSICS_H
#define SIMDUTF_ARM64_INTRINSICS_H
@@ -149,7 +149,7 @@ class implementation final : public simdutf::implementation {
#endif // SIMDUTF_ARM64_INTRINSICS_H
/* end file src/simdutf/arm64/intrinsics.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/arm64/bitmanipulation.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/arm64/bitmanipulation.h
/* begin file src/simdutf/arm64/bitmanipulation.h */
#ifndef SIMDUTF_ARM64_BITMANIPULATION_H
#define SIMDUTF_ARM64_BITMANIPULATION_H
@@ -169,7 +169,7 @@ simdutf_really_inline int count_ones(uint64_t input_num) {
#endif // SIMDUTF_ARM64_BITMANIPULATION_H
/* end file src/simdutf/arm64/bitmanipulation.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/arm64/simd.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/arm64/simd.h
/* begin file src/simdutf/arm64/simd.h */
#ifndef SIMDUTF_ARM64_SIMD_H
#define SIMDUTF_ARM64_SIMD_H
@@ -782,7 +782,7 @@ simdutf_really_inline int16x8_t make_int16x8_t(int16_t x1, int16_t x2, int16_t
).to_bitmask();
}
}; // struct simd8x64
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/arm64/simd16-inl.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/arm64/simd16-inl.h
/* begin file src/simdutf/arm64/simd16-inl.h */
template
struct simd16;
@@ -1095,7 +1095,7 @@ simdutf_really_inline simd16::operator simd16() const { retur
#endif // SIMDUTF_ARM64_SIMD_H
/* end file src/simdutf/arm64/simd.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/arm64/end.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/arm64/end.h
/* begin file src/simdutf/arm64/end.h */
/* end file src/simdutf/arm64/end.h */
@@ -1103,7 +1103,7 @@ simdutf_really_inline simd16::operator simd16() const { retur
#endif // SIMDUTF_ARM64_H
/* end file src/simdutf/arm64.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/icelake.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/icelake.h
/* begin file src/simdutf/icelake.h */
#ifndef SIMDUTF_ICELAKE_H
#define SIMDUTF_ICELAKE_H
@@ -1142,7 +1142,7 @@ simdutf_really_inline simd16::operator simd16() const { retur
#if SIMDUTF_CAN_ALWAYS_RUN_ICELAKE
#define SIMDUTF_TARGET_ICELAKE
#else
-#define SIMDUTF_TARGET_ICELAKE SIMDUTF_TARGET_REGION("avx512f,avx512dq,avx512cd,avx512bw,avx512vbmi,avx512vbmi2,avx512vl,avx2,bmi,bmi2,pclmul,lzcnt")
+#define SIMDUTF_TARGET_ICELAKE SIMDUTF_TARGET_REGION("avx512f,avx512dq,avx512cd,avx512bw,avx512vbmi,avx512vbmi2,avx512vl,avx2,bmi,bmi2,pclmul,lzcnt,popcnt")
#endif
namespace simdutf {
@@ -1155,7 +1155,7 @@ namespace icelake {
//
// These two need to be included outside SIMDUTF_TARGET_REGION
//
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/icelake/intrinsics.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/icelake/intrinsics.h
/* begin file src/simdutf/icelake/intrinsics.h */
#ifndef SIMDUTF_ICELAKE_INTRINSICS_H
#define SIMDUTF_ICELAKE_INTRINSICS_H
@@ -1265,7 +1265,7 @@ inline __m512i _mm512_set_epi8(uint8_t a0, uint8_t a1, uint8_t a2, uint8_t a3, u
#endif // SIMDUTF_HASWELL_INTRINSICS_H
/* end file src/simdutf/icelake/intrinsics.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/icelake/implementation.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/icelake/implementation.h
/* begin file src/simdutf/icelake/implementation.h */
#ifndef SIMDUTF_ICELAKE_IMPLEMENTATION_H
#define SIMDUTF_ICELAKE_IMPLEMENTATION_H
@@ -1348,7 +1348,7 @@ class implementation final : public simdutf::implementation {
//
// The rest need to be inside the region
//
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/icelake/begin.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/icelake/begin.h
/* begin file src/simdutf/icelake/begin.h */
// redefining SIMDUTF_IMPLEMENTATION to "icelake"
// #define SIMDUTF_IMPLEMENTATION icelake
@@ -1364,7 +1364,7 @@ SIMDUTF_DISABLE_GCC_WARNING(-Wmaybe-uninitialized)
#endif // end of workaround
/* end file src/simdutf/icelake/begin.h */
// Declarations
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/icelake/bitmanipulation.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/icelake/bitmanipulation.h
/* begin file src/simdutf/icelake/bitmanipulation.h */
#ifndef SIMDUTF_ICELAKE_BITMANIPULATION_H
#define SIMDUTF_ICELAKE_BITMANIPULATION_H
@@ -1390,7 +1390,7 @@ simdutf_really_inline long long int count_ones(uint64_t input_num) {
#endif // SIMDUTF_ICELAKE_BITMANIPULATION_H
/* end file src/simdutf/icelake/bitmanipulation.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/icelake/end.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/icelake/end.h
/* begin file src/simdutf/icelake/end.h */
#if SIMDUTF_CAN_ALWAYS_RUN_ICELAKE
// nothing needed.
@@ -1409,7 +1409,7 @@ SIMDUTF_POP_DISABLE_WARNINGS
#endif // SIMDUTF_IMPLEMENTATION_ICELAKE
#endif // SIMDUTF_ICELAKE_H
/* end file src/simdutf/icelake.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/haswell.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/haswell.h
/* begin file src/simdutf/haswell.h */
#ifndef SIMDUTF_HASWELL_H
#define SIMDUTF_HASWELL_H
@@ -1442,7 +1442,7 @@ SIMDUTF_POP_DISABLE_WARNINGS
#if SIMDUTF_IMPLEMENTATION_HASWELL
-#define SIMDUTF_TARGET_HASWELL SIMDUTF_TARGET_REGION("avx2,bmi,lzcnt")
+#define SIMDUTF_TARGET_HASWELL SIMDUTF_TARGET_REGION("avx2,bmi,lzcnt,popcnt")
namespace simdutf {
/**
@@ -1455,7 +1455,7 @@ namespace haswell {
//
// These two need to be included outside SIMDUTF_TARGET_REGION
//
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/haswell/implementation.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/haswell/implementation.h
/* begin file src/simdutf/haswell/implementation.h */
#ifndef SIMDUTF_HASWELL_IMPLEMENTATION_H
#define SIMDUTF_HASWELL_IMPLEMENTATION_H
@@ -1534,7 +1534,7 @@ class implementation final : public simdutf::implementation {
#endif // SIMDUTF_HASWELL_IMPLEMENTATION_H
/* end file src/simdutf/haswell/implementation.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/haswell/intrinsics.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/haswell/intrinsics.h
/* begin file src/simdutf/haswell/intrinsics.h */
#ifndef SIMDUTF_HASWELL_INTRINSICS_H
#define SIMDUTF_HASWELL_INTRINSICS_H
@@ -1603,7 +1603,7 @@ SIMDUTF_POP_DISABLE_WARNINGS
//
// The rest need to be inside the region
//
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/haswell/begin.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/haswell/begin.h
/* begin file src/simdutf/haswell/begin.h */
// redefining SIMDUTF_IMPLEMENTATION to "haswell"
// #define SIMDUTF_IMPLEMENTATION haswell
@@ -1619,7 +1619,7 @@ SIMDUTF_DISABLE_GCC_WARNING(-Wmaybe-uninitialized)
#endif // end of workaround
/* end file src/simdutf/haswell/begin.h */
// Declarations
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/haswell/bitmanipulation.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/haswell/bitmanipulation.h
/* begin file src/simdutf/haswell/bitmanipulation.h */
#ifndef SIMDUTF_HASWELL_BITMANIPULATION_H
#define SIMDUTF_HASWELL_BITMANIPULATION_H
@@ -1645,7 +1645,7 @@ simdutf_really_inline long long int count_ones(uint64_t input_num) {
#endif // SIMDUTF_HASWELL_BITMANIPULATION_H
/* end file src/simdutf/haswell/bitmanipulation.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/haswell/simd.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/haswell/simd.h
/* begin file src/simdutf/haswell/simd.h */
#ifndef SIMDUTF_HASWELL_SIMD_H
#define SIMDUTF_HASWELL_SIMD_H
@@ -2041,7 +2041,7 @@ namespace simd {
}
}; // struct simd8x64
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/haswell/simd16-inl.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/haswell/simd16-inl.h
/* begin file src/simdutf/haswell/simd16-inl.h */
#ifdef __GNUC__
#if __GNUC__ < 8
@@ -2320,7 +2320,7 @@ struct simd16: base16_numeric {
#endif // SIMDUTF_HASWELL_SIMD_H
/* end file src/simdutf/haswell/simd.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/haswell/end.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/haswell/end.h
/* begin file src/simdutf/haswell/end.h */
#if SIMDUTF_CAN_ALWAYS_RUN_HASWELL
// nothing needed.
@@ -2337,7 +2337,7 @@ SIMDUTF_POP_DISABLE_WARNINGS
#endif // SIMDUTF_IMPLEMENTATION_HASWELL
#endif // SIMDUTF_HASWELL_COMMON_H
/* end file src/simdutf/haswell.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/westmere.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/westmere.h
/* begin file src/simdutf/westmere.h */
#ifndef SIMDUTF_WESTMERE_H
#define SIMDUTF_WESTMERE_H
@@ -2365,7 +2365,7 @@ SIMDUTF_POP_DISABLE_WARNINGS
#if SIMDUTF_IMPLEMENTATION_WESTMERE
-#define SIMDUTF_TARGET_WESTMERE SIMDUTF_TARGET_REGION("sse4.2")
+#define SIMDUTF_TARGET_WESTMERE SIMDUTF_TARGET_REGION("sse4.2,popcnt")
namespace simdutf {
/**
@@ -2378,7 +2378,7 @@ namespace westmere {
//
// These two need to be included outside SIMDUTF_TARGET_REGION
//
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/westmere/implementation.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/westmere/implementation.h
/* begin file src/simdutf/westmere/implementation.h */
#ifndef SIMDUTF_WESTMERE_IMPLEMENTATION_H
#define SIMDUTF_WESTMERE_IMPLEMENTATION_H
@@ -2455,7 +2455,7 @@ class implementation final : public simdutf::implementation {
#endif // SIMDUTF_WESTMERE_IMPLEMENTATION_H
/* end file src/simdutf/westmere/implementation.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/westmere/intrinsics.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/westmere/intrinsics.h
/* begin file src/simdutf/westmere/intrinsics.h */
#ifndef SIMDUTF_WESTMERE_INTRINSICS_H
#define SIMDUTF_WESTMERE_INTRINSICS_H
@@ -2504,7 +2504,7 @@ SIMDUTF_POP_DISABLE_WARNINGS
//
// The rest need to be inside the region
//
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/westmere/begin.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/westmere/begin.h
/* begin file src/simdutf/westmere/begin.h */
// redefining SIMDUTF_IMPLEMENTATION to "westmere"
// #define SIMDUTF_IMPLEMENTATION westmere
@@ -2517,7 +2517,7 @@ SIMDUTF_TARGET_WESTMERE
/* end file src/simdutf/westmere/begin.h */
// Declarations
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/westmere/bitmanipulation.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/westmere/bitmanipulation.h
/* begin file src/simdutf/westmere/bitmanipulation.h */
#ifndef SIMDUTF_WESTMERE_BITMANIPULATION_H
#define SIMDUTF_WESTMERE_BITMANIPULATION_H
@@ -2543,7 +2543,7 @@ simdutf_really_inline long long int count_ones(uint64_t input_num) {
#endif // SIMDUTF_WESTMERE_BITMANIPULATION_H
/* end file src/simdutf/westmere/bitmanipulation.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/westmere/simd.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/westmere/simd.h
/* begin file src/simdutf/westmere/simd.h */
#ifndef SIMDUTF_WESTMERE_SIMD_H
#define SIMDUTF_WESTMERE_SIMD_H
@@ -2987,7 +2987,7 @@ namespace simd {
}
}; // struct simd8x64
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/westmere/simd16-inl.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/westmere/simd16-inl.h
/* begin file src/simdutf/westmere/simd16-inl.h */
template
struct simd16;
@@ -3264,7 +3264,7 @@ template
#endif // SIMDUTF_WESTMERE_SIMD_INPUT_H
/* end file src/simdutf/westmere/simd.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/westmere/end.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/westmere/end.h
/* begin file src/simdutf/westmere/end.h */
#if SIMDUTF_CAN_ALWAYS_RUN_WESTMERE
// nothing needed.
@@ -3277,7 +3277,7 @@ SIMDUTF_UNTARGET_REGION
#endif // SIMDUTF_IMPLEMENTATION_WESTMERE
#endif // SIMDUTF_WESTMERE_COMMON_H
/* end file src/simdutf/westmere.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/ppc64.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/ppc64.h
/* begin file src/simdutf/ppc64.h */
#ifndef SIMDUTF_PPC64_H
#define SIMDUTF_PPC64_H
@@ -3304,7 +3304,7 @@ namespace ppc64 {
} // namespace ppc64
} // namespace simdutf
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/ppc64/implementation.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/ppc64/implementation.h
/* begin file src/simdutf/ppc64/implementation.h */
#ifndef SIMDUTF_PPC64_IMPLEMENTATION_H
#define SIMDUTF_PPC64_IMPLEMENTATION_H
@@ -3383,14 +3383,14 @@ class implementation final : public simdutf::implementation {
#endif // SIMDUTF_PPC64_IMPLEMENTATION_H
/* end file src/simdutf/ppc64/implementation.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/ppc64/begin.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/ppc64/begin.h
/* begin file src/simdutf/ppc64/begin.h */
// redefining SIMDUTF_IMPLEMENTATION to "ppc64"
// #define SIMDUTF_IMPLEMENTATION ppc64
/* end file src/simdutf/ppc64/begin.h */
// Declarations
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/ppc64/intrinsics.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/ppc64/intrinsics.h
/* begin file src/simdutf/ppc64/intrinsics.h */
#ifndef SIMDUTF_PPC64_INTRINSICS_H
#define SIMDUTF_PPC64_INTRINSICS_H
@@ -3411,7 +3411,7 @@ class implementation final : public simdutf::implementation {
#endif // SIMDUTF_PPC64_INTRINSICS_H
/* end file src/simdutf/ppc64/intrinsics.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/ppc64/bitmanipulation.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/ppc64/bitmanipulation.h
/* begin file src/simdutf/ppc64/bitmanipulation.h */
#ifndef SIMDUTF_PPC64_BITMANIPULATION_H
#define SIMDUTF_PPC64_BITMANIPULATION_H
@@ -3437,7 +3437,7 @@ simdutf_really_inline int count_ones(uint64_t input_num) {
#endif // SIMDUTF_PPC64_BITMANIPULATION_H
/* end file src/simdutf/ppc64/bitmanipulation.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/ppc64/simd.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/ppc64/simd.h
/* begin file src/simdutf/ppc64/simd.h */
#ifndef SIMDUTF_PPC64_SIMD_H
#define SIMDUTF_PPC64_SIMD_H
@@ -3929,7 +3929,7 @@ template struct simd8x64 {
#endif // SIMDUTF_PPC64_SIMD_INPUT_H
/* end file src/simdutf/ppc64/simd.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/ppc64/end.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/ppc64/end.h
/* begin file src/simdutf/ppc64/end.h */
/* end file src/simdutf/ppc64/end.h */
@@ -3937,7 +3937,7 @@ template struct simd8x64 {
#endif // SIMDUTF_PPC64_H
/* end file src/simdutf/ppc64.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/fallback.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/fallback.h
/* begin file src/simdutf/fallback.h */
#ifndef SIMDUTF_FALLBACK_H
#define SIMDUTF_FALLBACK_H
@@ -3966,7 +3966,7 @@ namespace fallback {
} // namespace fallback
} // namespace simdutf
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/fallback/implementation.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/fallback/implementation.h
/* begin file src/simdutf/fallback/implementation.h */
#ifndef SIMDUTF_FALLBACK_IMPLEMENTATION_H
#define SIMDUTF_FALLBACK_IMPLEMENTATION_H
@@ -4047,14 +4047,14 @@ class implementation final : public simdutf::implementation {
#endif // SIMDUTF_FALLBACK_IMPLEMENTATION_H
/* end file src/simdutf/fallback/implementation.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/fallback/begin.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/fallback/begin.h
/* begin file src/simdutf/fallback/begin.h */
// redefining SIMDUTF_IMPLEMENTATION to "fallback"
// #define SIMDUTF_IMPLEMENTATION fallback
/* end file src/simdutf/fallback/begin.h */
// Declarations
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/fallback/bitmanipulation.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/fallback/bitmanipulation.h
/* begin file src/simdutf/fallback/bitmanipulation.h */
#ifndef SIMDUTF_FALLBACK_BITMANIPULATION_H
#define SIMDUTF_FALLBACK_BITMANIPULATION_H
@@ -4089,7 +4089,7 @@ static unsigned char _BitScanReverse64(unsigned long* ret, uint64_t x) {
#endif // SIMDUTF_FALLBACK_BITMANIPULATION_H
/* end file src/simdutf/fallback/bitmanipulation.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/fallback/end.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/fallback/end.h
/* begin file src/simdutf/fallback/end.h */
/* end file src/simdutf/fallback/end.h */
@@ -4978,7 +4978,7 @@ const implementation * builtin_implementation() {
} // namespace simdutf
/* end file src/implementation.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=encoding_types.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=encoding_types.cpp
/* begin file src/encoding_types.cpp */
namespace simdutf {
@@ -5040,7 +5040,7 @@ encoding_type check_bom(const char* byte, size_t length) {
}
}
/* end file src/encoding_types.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=error.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=error.cpp
/* begin file src/error.cpp */
namespace simdutf {
@@ -5052,7 +5052,7 @@ namespace simdutf {
/* end file src/error.cpp */
// The large tables should be included once and they
// should not depend on a kernel.
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=tables/utf8_to_utf16_tables.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=tables/utf8_to_utf16_tables.h
/* begin file src/tables/utf8_to_utf16_tables.h */
#ifndef SIMDUTF_UTF8_TO_UTF16_TABLES_H
#define SIMDUTF_UTF8_TO_UTF16_TABLES_H
@@ -9391,7 +9391,7 @@ const uint8_t utf8bigindex[4096][2] =
#endif // SIMDUTF_UTF8_TO_UTF16_TABLES_H
/* end file src/tables/utf8_to_utf16_tables.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=tables/utf16_to_utf8_tables.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=tables/utf16_to_utf8_tables.h
/* begin file src/tables/utf16_to_utf8_tables.h */
// file generated by scripts/sse_convert_utf16_to_utf8.py
#ifndef SIMDUTF_UTF16_TO_UTF8_TABLES_H
@@ -9932,7 +9932,7 @@ namespace utf16_to_utf8 {
// End of tables.
// The scalar routines should be included once.
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=scalar/ascii.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=scalar/ascii.h
/* begin file src/scalar/ascii.h */
#ifndef SIMDUTF_ASCII_H
#define SIMDUTF_ASCII_H
@@ -9993,7 +9993,7 @@ inline simdutf_warn_unused result validate_with_errors(const char *buf, size_t l
#endif
/* end file src/scalar/ascii.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=scalar/utf8.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=scalar/utf8.h
/* begin file src/scalar/utf8.h */
#ifndef SIMDUTF_UTF8_H
#define SIMDUTF_UTF8_H
@@ -10183,7 +10183,7 @@ inline size_t utf16_length_from_utf8(const char* buf, size_t len) {
#endif
/* end file src/scalar/utf8.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=scalar/utf16.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=scalar/utf16.h
/* begin file src/scalar/utf16.h */
#ifndef SIMDUTF_UTF16_H
#define SIMDUTF_UTF16_H
@@ -10297,7 +10297,7 @@ simdutf_really_inline void change_endianness_utf16(const char16_t* in, size_t si
#endif
/* end file src/scalar/utf16.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=scalar/utf32.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=scalar/utf32.h
/* begin file src/scalar/utf32.h */
#ifndef SIMDUTF_UTF32_H
#define SIMDUTF_UTF32_H
@@ -10372,7 +10372,7 @@ inline size_t utf16_length_from_utf32(const char32_t* buf, size_t len) {
#endif
/* end file src/scalar/utf32.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=scalar/utf32_to_utf8/valid_utf32_to_utf8.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=scalar/utf32_to_utf8/valid_utf32_to_utf8.h
/* begin file src/scalar/utf32_to_utf8/valid_utf32_to_utf8.h */
#ifndef SIMDUTF_VALID_UTF32_TO_UTF8_H
#define SIMDUTF_VALID_UTF32_TO_UTF8_H
@@ -10439,7 +10439,7 @@ inline size_t convert_valid(const char32_t* buf, size_t len, char* utf8_output)
#endif
/* end file src/scalar/utf32_to_utf8/valid_utf32_to_utf8.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=scalar/utf32_to_utf8/utf32_to_utf8.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=scalar/utf32_to_utf8/utf32_to_utf8.h
/* begin file src/scalar/utf32_to_utf8/utf32_to_utf8.h */
#ifndef SIMDUTF_UTF32_TO_UTF8_H
#define SIMDUTF_UTF32_TO_UTF8_H
@@ -10555,7 +10555,7 @@ inline result convert_with_errors(const char32_t* buf, size_t len, char* utf8_ou
#endif
/* end file src/scalar/utf32_to_utf8/utf32_to_utf8.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=scalar/utf32_to_utf16/valid_utf32_to_utf16.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=scalar/utf32_to_utf16/valid_utf32_to_utf16.h
/* begin file src/scalar/utf32_to_utf16/valid_utf32_to_utf16.h */
#ifndef SIMDUTF_VALID_UTF32_TO_UTF16_H
#define SIMDUTF_VALID_UTF32_TO_UTF16_H
@@ -10600,7 +10600,7 @@ inline size_t convert_valid(const char32_t* buf, size_t len, char16_t* utf16_out
#endif
/* end file src/scalar/utf32_to_utf16/valid_utf32_to_utf16.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=scalar/utf32_to_utf16/utf32_to_utf16.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=scalar/utf32_to_utf16/utf32_to_utf16.h
/* begin file src/scalar/utf32_to_utf16/utf32_to_utf16.h */
#ifndef SIMDUTF_UTF32_TO_UTF16_H
#define SIMDUTF_UTF32_TO_UTF16_H
@@ -10676,7 +10676,7 @@ inline result convert_with_errors(const char32_t* buf, size_t len, char16_t* utf
#endif
/* end file src/scalar/utf32_to_utf16/utf32_to_utf16.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=scalar/utf16_to_utf8/valid_utf16_to_utf8.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=scalar/utf16_to_utf8/valid_utf16_to_utf8.h
/* begin file src/scalar/utf16_to_utf8/valid_utf16_to_utf8.h */
#ifndef SIMDUTF_VALID_UTF16_TO_UTF8_H
#define SIMDUTF_VALID_UTF16_TO_UTF8_H
@@ -10751,7 +10751,7 @@ inline size_t convert_valid(const char16_t* buf, size_t len, char* utf8_output)
#endif
/* end file src/scalar/utf16_to_utf8/valid_utf16_to_utf8.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=scalar/utf16_to_utf8/utf16_to_utf8.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=scalar/utf16_to_utf8/utf16_to_utf8.h
/* begin file src/scalar/utf16_to_utf8/utf16_to_utf8.h */
#ifndef SIMDUTF_UTF16_TO_UTF8_H
#define SIMDUTF_UTF16_TO_UTF8_H
@@ -10887,7 +10887,7 @@ inline result convert_with_errors(const char16_t* buf, size_t len, char* utf8_ou
#endif
/* end file src/scalar/utf16_to_utf8/utf16_to_utf8.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=scalar/utf16_to_utf32/valid_utf16_to_utf32.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=scalar/utf16_to_utf32/valid_utf16_to_utf32.h
/* begin file src/scalar/utf16_to_utf32/valid_utf16_to_utf32.h */
#ifndef SIMDUTF_VALID_UTF16_TO_UTF32_H
#define SIMDUTF_VALID_UTF16_TO_UTF32_H
@@ -10929,7 +10929,7 @@ inline size_t convert_valid(const char16_t* buf, size_t len, char32_t* utf32_out
#endif
/* end file src/scalar/utf16_to_utf32/valid_utf16_to_utf32.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=scalar/utf16_to_utf32/utf16_to_utf32.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=scalar/utf16_to_utf32/utf16_to_utf32.h
/* begin file src/scalar/utf16_to_utf32/utf16_to_utf32.h */
#ifndef SIMDUTF_UTF16_TO_UTF32_H
#define SIMDUTF_UTF16_TO_UTF32_H
@@ -11001,7 +11001,7 @@ inline result convert_with_errors(const char16_t* buf, size_t len, char32_t* utf
#endif
/* end file src/scalar/utf16_to_utf32/utf16_to_utf32.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=scalar/utf8_to_utf16/valid_utf8_to_utf16.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=scalar/utf8_to_utf16/valid_utf8_to_utf16.h
/* begin file src/scalar/utf8_to_utf16/valid_utf8_to_utf16.h */
#ifndef SIMDUTF_VALID_UTF8_TO_UTF16_H
#define SIMDUTF_VALID_UTF8_TO_UTF16_H
@@ -11086,7 +11086,7 @@ inline size_t convert_valid(const char* buf, size_t len, char16_t* utf16_output)
#endif
/* end file src/scalar/utf8_to_utf16/valid_utf8_to_utf16.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=scalar/utf8_to_utf16/utf8_to_utf16.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=scalar/utf8_to_utf16/utf8_to_utf16.h
/* begin file src/scalar/utf8_to_utf16/utf8_to_utf16.h */
#ifndef SIMDUTF_UTF8_TO_UTF16_H
#define SIMDUTF_UTF8_TO_UTF16_H
@@ -11336,7 +11336,7 @@ inline result rewind_and_convert_with_errors(size_t prior_bytes, const char* buf
#endif
/* end file src/scalar/utf8_to_utf16/utf8_to_utf16.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=scalar/utf8_to_utf32/valid_utf8_to_utf32.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=scalar/utf8_to_utf32/valid_utf8_to_utf32.h
/* begin file src/scalar/utf8_to_utf32/valid_utf8_to_utf32.h */
#ifndef SIMDUTF_VALID_UTF8_TO_UTF32_H
#define SIMDUTF_VALID_UTF8_TO_UTF32_H
@@ -11402,7 +11402,7 @@ inline size_t convert_valid(const char* buf, size_t len, char32_t* utf32_output)
#endif
/* end file src/scalar/utf8_to_utf32/valid_utf8_to_utf32.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=scalar/utf8_to_utf32/utf8_to_utf32.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=scalar/utf8_to_utf32/utf8_to_utf32.h
/* begin file src/scalar/utf8_to_utf32/utf8_to_utf32.h */
#ifndef SIMDUTF_UTF8_TO_UTF32_H
#define SIMDUTF_UTF8_TO_UTF32_H
@@ -11622,9 +11622,9 @@ SIMDUTF_DISABLE_UNDESIRED_WARNINGS
#if SIMDUTF_IMPLEMENTATION_ARM64
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=arm64/implementation.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=arm64/implementation.cpp
/* begin file src/arm64/implementation.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/arm64/begin.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/arm64/begin.h
/* begin file src/simdutf/arm64/begin.h */
// redefining SIMDUTF_IMPLEMENTATION to "arm64"
// #define SIMDUTF_IMPLEMENTATION arm64
@@ -11660,7 +11660,7 @@ simdutf_really_inline simd8 must_be_2_3_continuation(const simd8
return is_third_byte ^ is_fourth_byte;
}
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=arm64/arm_detect_encodings.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=arm64/arm_detect_encodings.cpp
/* begin file src/arm64/arm_detect_encodings.cpp */
template
// len is known to be a multiple of 2 when this is called
@@ -11868,7 +11868,7 @@ int arm_detect_encodings(const char * buf, size_t len) {
}
/* end file src/arm64/arm_detect_encodings.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=arm64/arm_validate_utf16.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=arm64/arm_validate_utf16.cpp
/* begin file src/arm64/arm_validate_utf16.cpp */
template
const char16_t* arm_validate_utf16(const char16_t* input, size_t size) {
@@ -12018,7 +12018,7 @@ const result arm_validate_utf16_with_errors(const char16_t* input, size_t size)
return result(error_code::SUCCESS, input - start);
}
/* end file src/arm64/arm_validate_utf16.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=arm64/arm_validate_utf32le.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=arm64/arm_validate_utf32le.cpp
/* begin file src/arm64/arm_validate_utf32le.cpp */
const char32_t* arm_validate_utf32le(const char32_t* input, size_t size) {
@@ -12083,7 +12083,7 @@ const result arm_validate_utf32le_with_errors(const char32_t* input, size_t size
}
/* end file src/arm64/arm_validate_utf32le.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=arm64/arm_convert_utf8_to_utf16.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=arm64/arm_convert_utf8_to_utf16.cpp
/* begin file src/arm64/arm_convert_utf8_to_utf16.cpp */
// Convert up to 12 bytes from utf8 to utf16 using a mask indicating the
// end of the code points. Only the least significant 12 bits of the mask
@@ -12270,7 +12270,7 @@ size_t convert_masked_utf8_to_utf16(const char *input,
return consumed;
}
/* end file src/arm64/arm_convert_utf8_to_utf16.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=arm64/arm_convert_utf8_to_utf32.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=arm64/arm_convert_utf8_to_utf32.cpp
/* begin file src/arm64/arm_convert_utf8_to_utf32.cpp */
// Convert up to 12 bytes from utf8 to utf32 using a mask indicating the
// end of the code points. Only the least significant 12 bits of the mask
@@ -12407,7 +12407,7 @@ size_t convert_masked_utf8_to_utf32(const char *input,
}
/* end file src/arm64/arm_convert_utf8_to_utf32.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=arm64/arm_convert_utf16_to_utf8.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=arm64/arm_convert_utf16_to_utf8.cpp
/* begin file src/arm64/arm_convert_utf16_to_utf8.cpp */
/*
The vectorized algorithm works on single SSE register i.e., it
@@ -12987,7 +12987,7 @@ std::pair arm_convert_utf16_to_utf8_with_errors(const char16_t* b
return std::make_pair(result(error_code::SUCCESS, buf - start), reinterpret_cast(utf8_output));
}
/* end file src/arm64/arm_convert_utf16_to_utf8.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=arm64/arm_convert_utf16_to_utf32.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=arm64/arm_convert_utf16_to_utf32.cpp
/* begin file src/arm64/arm_convert_utf16_to_utf32.cpp */
/*
The vectorized algorithm works on single SSE register i.e., it
@@ -13164,7 +13164,7 @@ std::pair arm_convert_utf16_to_utf32_with_errors(const char16
}
/* end file src/arm64/arm_convert_utf16_to_utf32.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=arm64/arm_convert_utf32_to_utf8.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=arm64/arm_convert_utf32_to_utf8.cpp
/* begin file src/arm64/arm_convert_utf32_to_utf8.cpp */
std::pair arm_convert_utf32_to_utf8(const char32_t* buf, size_t len, char* utf8_out) {
uint8_t * utf8_output = reinterpret_cast(utf8_out);
@@ -13636,7 +13636,7 @@ std::pair arm_convert_utf32_to_utf8_with_errors(const char32_t* b
return std::make_pair(result(error_code::SUCCESS, buf - start), reinterpret_cast(utf8_output));
}
/* end file src/arm64/arm_convert_utf32_to_utf8.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=arm64/arm_convert_utf32_to_utf16.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=arm64/arm_convert_utf32_to_utf16.cpp
/* begin file src/arm64/arm_convert_utf32_to_utf16.cpp */
template
std::pair arm_convert_utf32_to_utf16(const char32_t* buf, size_t len, char16_t* utf16_out) {
@@ -13769,7 +13769,7 @@ std::pair arm_convert_utf32_to_utf16_with_errors(const char32
} // unnamed namespace
} // namespace arm64
} // namespace simdutf
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/buf_block_reader.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/buf_block_reader.h
/* begin file src/generic/buf_block_reader.h */
namespace simdutf {
namespace arm64 {
@@ -13864,7 +13864,7 @@ simdutf_really_inline void buf_block_reader::advance() {
} // namespace arm64
} // namespace simdutf
/* end file src/generic/buf_block_reader.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_validation/utf8_lookup4_algorithm.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_validation/utf8_lookup4_algorithm.h
/* begin file src/generic/utf8_validation/utf8_lookup4_algorithm.h */
namespace simdutf {
namespace arm64 {
@@ -14053,7 +14053,7 @@ using utf8_validation::utf8_checker;
} // namespace arm64
} // namespace simdutf
/* end file src/generic/utf8_validation/utf8_lookup4_algorithm.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_validation/utf8_validator.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_validation/utf8_validator.h
/* begin file src/generic/utf8_validation/utf8_validator.h */
namespace simdutf {
namespace arm64 {
@@ -14180,7 +14180,7 @@ result generic_validate_ascii_with_errors(const char * input, size_t length) {
} // namespace simdutf
/* end file src/generic/utf8_validation/utf8_validator.h */
// transcoding from UTF-8 to UTF-16
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf16/valid_utf8_to_utf16.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf16/valid_utf8_to_utf16.h
/* begin file src/generic/utf8_to_utf16/valid_utf8_to_utf16.h */
@@ -14255,7 +14255,7 @@ simdutf_warn_unused size_t convert_valid(const char* input, size_t size,
} // namespace arm64
} // namespace simdutf
/* end file src/generic/utf8_to_utf16/valid_utf8_to_utf16.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf16/utf8_to_utf16.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf16/utf8_to_utf16.h
/* begin file src/generic/utf8_to_utf16/utf8_to_utf16.h */
@@ -14563,7 +14563,7 @@ using namespace simd;
} // namespace simdutf
/* end file src/generic/utf8_to_utf16/utf8_to_utf16.h */
// transcoding from UTF-8 to UTF-32
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf32/valid_utf8_to_utf32.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf32/valid_utf8_to_utf32.h
/* begin file src/generic/utf8_to_utf32/valid_utf8_to_utf32.h */
namespace simdutf {
@@ -14609,7 +14609,7 @@ simdutf_warn_unused size_t convert_valid(const char* input, size_t size,
} // namespace arm64
} // namespace simdutf
/* end file src/generic/utf8_to_utf32/valid_utf8_to_utf32.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf32/utf8_to_utf32.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf32/utf8_to_utf32.h
/* begin file src/generic/utf8_to_utf32/utf8_to_utf32.h */
@@ -14910,7 +14910,7 @@ using namespace simd;
} // namespace simdutf
/* end file src/generic/utf8_to_utf32/utf8_to_utf32.h */
// other functions
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8.h
/* begin file src/generic/utf8.h */
namespace simdutf {
@@ -14957,7 +14957,7 @@ simdutf_really_inline size_t utf32_length_from_utf8(const char* in, size_t size)
} // namespace arm64
} // namespace simdutf
/* end file src/generic/utf8.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf16.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf16.h
/* begin file src/generic/utf16.h */
namespace simdutf {
namespace arm64 {
@@ -15492,15 +15492,15 @@ simdutf_warn_unused size_t implementation::utf32_length_from_utf8(const char * i
} // namespace arm64
} // namespace simdutf
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/arm64/end.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/arm64/end.h
/* begin file src/simdutf/arm64/end.h */
/* end file src/simdutf/arm64/end.h */
/* end file src/arm64/implementation.cpp */
#endif
#if SIMDUTF_IMPLEMENTATION_FALLBACK
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=fallback/implementation.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=fallback/implementation.cpp
/* begin file src/fallback/implementation.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/fallback/begin.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/fallback/begin.h
/* begin file src/simdutf/fallback/begin.h */
// redefining SIMDUTF_IMPLEMENTATION to "fallback"
// #define SIMDUTF_IMPLEMENTATION fallback
@@ -15744,17 +15744,17 @@ simdutf_warn_unused size_t implementation::utf32_length_from_utf8(const char * i
} // namespace fallback
} // namespace simdutf
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/fallback/end.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/fallback/end.h
/* begin file src/simdutf/fallback/end.h */
/* end file src/simdutf/fallback/end.h */
/* end file src/fallback/implementation.cpp */
#endif
#if SIMDUTF_IMPLEMENTATION_ICELAKE
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=icelake/implementation.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=icelake/implementation.cpp
/* begin file src/icelake/implementation.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/icelake/begin.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/icelake/begin.h
/* begin file src/simdutf/icelake/begin.h */
// redefining SIMDUTF_IMPLEMENTATION to "icelake"
// #define SIMDUTF_IMPLEMENTATION icelake
@@ -15775,7 +15775,7 @@ namespace {
#ifndef SIMDUTF_ICELAKE_H
#error "icelake.h must be included"
#endif
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=icelake/icelake_utf8_common.inl.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=icelake/icelake_utf8_common.inl.cpp
/* begin file src/icelake/icelake_utf8_common.inl.cpp */
// Common procedures for both validating and non-validating conversions from UTF-8.
enum block_processing_mode { SIMDUTF_FULL, SIMDUTF_TAIL};
@@ -16440,7 +16440,7 @@ simdutf_really_inline __m512i expand_utf8_to_utf32(__m512i input) {
return expanded_utf8_to_utf32(char_class, input);
}
/* end file src/icelake/icelake_utf8_common.inl.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=icelake/icelake_macros.inl.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=icelake/icelake_macros.inl.cpp
/* begin file src/icelake/icelake_macros.inl.cpp */
/*
@@ -16576,7 +16576,7 @@ simdutf_really_inline __m512i expand_utf8_to_utf32(__m512i input) {
} \
}
/* end file src/icelake/icelake_macros.inl.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=icelake/icelake_from_valid_utf8.inl.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=icelake/icelake_from_valid_utf8.inl.cpp
/* begin file src/icelake/icelake_from_valid_utf8.inl.cpp */
// file included directly
@@ -16715,7 +16715,7 @@ std::pair valid_utf8_to_fixed_length(const char* str, size
using utf8_to_utf16_result = std::pair;
/* end file src/icelake/icelake_from_valid_utf8.inl.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=icelake/icelake_utf8_validation.inl.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=icelake/icelake_utf8_validation.inl.cpp
/* begin file src/icelake/icelake_utf8_validation.inl.cpp */
// file included directly
@@ -16845,7 +16845,7 @@ simdutf_really_inline __m512i check_special_cases(__m512i input, const __m512i p
}; // struct avx512_utf8_checker
/* end file src/icelake/icelake_utf8_validation.inl.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=icelake/icelake_from_utf8.inl.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=icelake/icelake_from_utf8.inl.cpp
/* begin file src/icelake/icelake_from_utf8.inl.cpp */
// file included directly
@@ -17148,7 +17148,7 @@ std::tuple validating_utf8_to_fixed_length_with_cons
return {ptr, output, true};
}
/* end file src/icelake/icelake_from_utf8.inl.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=icelake/icelake_convert_utf16_to_utf32.inl.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=icelake/icelake_convert_utf16_to_utf32.inl.cpp
/* begin file src/icelake/icelake_convert_utf16_to_utf32.inl.cpp */
// file included directly
@@ -17260,7 +17260,7 @@ std::tuple convert_utf16_to_utf32(const char16
return std::make_tuple(buf+carry, utf32_output, true);
}
/* end file src/icelake/icelake_convert_utf16_to_utf32.inl.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=icelake/icelake_convert_utf32_to_utf8.inl.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=icelake/icelake_convert_utf32_to_utf8.inl.cpp
/* begin file src/icelake/icelake_convert_utf32_to_utf8.inl.cpp */
// file included directly
@@ -17746,7 +17746,7 @@ std::pair avx512_convert_utf32_to_utf8_with_errors(const char32_t
return std::make_pair(result(error_code::SUCCESS, buf - start), utf8_output);
}
/* end file src/icelake/icelake_convert_utf32_to_utf8.inl.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=icelake/icelake_convert_utf32_to_utf16.inl.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=icelake/icelake_convert_utf32_to_utf16.inl.cpp
/* begin file src/icelake/icelake_convert_utf32_to_utf16.inl.cpp */
// file included directly
@@ -17881,7 +17881,7 @@ std::pair avx512_convert_utf32_to_utf16_with_errors(const cha
return std::make_pair(result(error_code::SUCCESS, buf - start), utf16_output);
}
/* end file src/icelake/icelake_convert_utf32_to_utf16.inl.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=icelake/icelake_ascii_validation.inl.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=icelake/icelake_ascii_validation.inl.cpp
/* begin file src/icelake/icelake_ascii_validation.inl.cpp */
// file included directly
@@ -17900,7 +17900,7 @@ bool validate_ascii(const char* buf, size_t len) {
return (_mm512_test_epi8_mask(running_or, running_or) == 0);
}
/* end file src/icelake/icelake_ascii_validation.inl.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=icelake/icelake_utf32_validation.inl.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=icelake/icelake_utf32_validation.inl.cpp
/* begin file src/icelake/icelake_utf32_validation.inl.cpp */
// file included directly
@@ -17932,7 +17932,7 @@ const char32_t* validate_utf32(const char32_t* buf, size_t len) {
return buf;
}
/* end file src/icelake/icelake_utf32_validation.inl.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=icelake/icelake_convert_utf16_to_utf8.inl.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=icelake/icelake_convert_utf16_to_utf8.inl.cpp
/* begin file src/icelake/icelake_convert_utf16_to_utf8.inl.cpp */
// file included directly
@@ -19244,7 +19244,7 @@ simdutf_warn_unused size_t implementation::utf32_length_from_utf8(const char * i
} // namespace icelake
} // namespace simdutf
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/icelake/end.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/icelake/end.h
/* begin file src/simdutf/icelake/end.h */
#if SIMDUTF_CAN_ALWAYS_RUN_ICELAKE
// nothing needed.
@@ -19260,10 +19260,10 @@ SIMDUTF_POP_DISABLE_WARNINGS
/* end file src/icelake/implementation.cpp */
#endif
#if SIMDUTF_IMPLEMENTATION_HASWELL
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=haswell/implementation.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=haswell/implementation.cpp
/* begin file src/haswell/implementation.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/haswell/begin.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/haswell/begin.h
/* begin file src/simdutf/haswell/begin.h */
// redefining SIMDUTF_IMPLEMENTATION to "haswell"
// #define SIMDUTF_IMPLEMENTATION haswell
@@ -19306,7 +19306,7 @@ simdutf_really_inline simd8 must_be_2_3_continuation(const simd8
return simd8(is_third_byte | is_fourth_byte) > int8_t(0);
}
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=haswell/avx2_detect_encodings.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=haswell/avx2_detect_encodings.cpp
/* begin file src/haswell/avx2_detect_encodings.cpp */
template
// len is known to be a multiple of 2 when this is called
@@ -19496,7 +19496,7 @@ int avx2_detect_encodings(const char * buf, size_t len) {
}
/* end file src/haswell/avx2_detect_encodings.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=haswell/avx2_validate_utf16.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=haswell/avx2_validate_utf16.cpp
/* begin file src/haswell/avx2_validate_utf16.cpp */
/*
In UTF-16 words in range 0xD800 to 0xDFFF have special meaning.
@@ -19697,7 +19697,7 @@ const result avx2_validate_utf16_with_errors(const char16_t* input, size_t size)
return result(error_code::SUCCESS, input - start);
}
/* end file src/haswell/avx2_validate_utf16.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=haswell/avx2_validate_utf32le.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=haswell/avx2_validate_utf32le.cpp
/* begin file src/haswell/avx2_validate_utf32le.cpp */
/* Returns:
- pointer to the last unprocessed character (a scalar fallback should check the rest);
@@ -19763,7 +19763,7 @@ const result avx2_validate_utf32le_with_errors(const char32_t* input, size_t siz
}
/* end file src/haswell/avx2_validate_utf32le.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=haswell/avx2_convert_utf8_to_utf16.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=haswell/avx2_convert_utf8_to_utf16.cpp
/* begin file src/haswell/avx2_convert_utf8_to_utf16.cpp */
// depends on "tables/utf8_to_utf16_tables.h"
@@ -19946,7 +19946,7 @@ size_t convert_masked_utf8_to_utf16(const char *input,
return consumed;
}
/* end file src/haswell/avx2_convert_utf8_to_utf16.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=haswell/avx2_convert_utf8_to_utf32.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=haswell/avx2_convert_utf8_to_utf32.cpp
/* begin file src/haswell/avx2_convert_utf8_to_utf32.cpp */
// depends on "tables/utf8_to_utf16_tables.h"
@@ -20075,7 +20075,7 @@ size_t convert_masked_utf8_to_utf32(const char *input,
}
/* end file src/haswell/avx2_convert_utf8_to_utf32.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=haswell/avx2_convert_utf16_to_utf8.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=haswell/avx2_convert_utf16_to_utf8.cpp
/* begin file src/haswell/avx2_convert_utf16_to_utf8.cpp */
/*
The vectorized algorithm works on single SSE register i.e., it
@@ -20608,7 +20608,7 @@ std::pair avx2_convert_utf16_to_utf8_with_errors(const char16_t*
return std::make_pair(result(error_code::SUCCESS, buf - start), utf8_output);
}
/* end file src/haswell/avx2_convert_utf16_to_utf8.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=haswell/avx2_convert_utf16_to_utf32.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=haswell/avx2_convert_utf16_to_utf32.cpp
/* begin file src/haswell/avx2_convert_utf16_to_utf32.cpp */
/*
The vectorized algorithm works on single SSE register i.e., it
@@ -20793,7 +20793,7 @@ std::pair avx2_convert_utf16_to_utf32_with_errors(const char1
}
/* end file src/haswell/avx2_convert_utf16_to_utf32.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=haswell/avx2_convert_utf32_to_utf8.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=haswell/avx2_convert_utf32_to_utf8.cpp
/* begin file src/haswell/avx2_convert_utf32_to_utf8.cpp */
std::pair avx2_convert_utf32_to_utf8(const char32_t* buf, size_t len, char* utf8_output) {
const char32_t* end = buf + len;
@@ -21276,7 +21276,7 @@ std::pair avx2_convert_utf32_to_utf8_with_errors(const char32_t*
return std::make_pair(result(error_code::SUCCESS, buf - start), utf8_output);
}
/* end file src/haswell/avx2_convert_utf32_to_utf8.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=haswell/avx2_convert_utf32_to_utf16.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=haswell/avx2_convert_utf32_to_utf16.cpp
/* begin file src/haswell/avx2_convert_utf32_to_utf16.cpp */
template
std::pair avx2_convert_utf32_to_utf16(const char32_t* buf, size_t len, char16_t* utf16_output) {
@@ -21412,7 +21412,7 @@ std::pair avx2_convert_utf32_to_utf16_with_errors(const char3
} // namespace haswell
} // namespace simdutf
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/buf_block_reader.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/buf_block_reader.h
/* begin file src/generic/buf_block_reader.h */
namespace simdutf {
namespace haswell {
@@ -21507,7 +21507,7 @@ simdutf_really_inline void buf_block_reader::advance() {
} // namespace haswell
} // namespace simdutf
/* end file src/generic/buf_block_reader.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_validation/utf8_lookup4_algorithm.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_validation/utf8_lookup4_algorithm.h
/* begin file src/generic/utf8_validation/utf8_lookup4_algorithm.h */
namespace simdutf {
namespace haswell {
@@ -21696,7 +21696,7 @@ using utf8_validation::utf8_checker;
} // namespace haswell
} // namespace simdutf
/* end file src/generic/utf8_validation/utf8_lookup4_algorithm.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_validation/utf8_validator.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_validation/utf8_validator.h
/* begin file src/generic/utf8_validation/utf8_validator.h */
namespace simdutf {
namespace haswell {
@@ -21823,7 +21823,7 @@ result generic_validate_ascii_with_errors(const char * input, size_t length) {
} // namespace simdutf
/* end file src/generic/utf8_validation/utf8_validator.h */
// transcoding from UTF-8 to UTF-16
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf16/valid_utf8_to_utf16.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf16/valid_utf8_to_utf16.h
/* begin file src/generic/utf8_to_utf16/valid_utf8_to_utf16.h */
@@ -21898,7 +21898,7 @@ simdutf_warn_unused size_t convert_valid(const char* input, size_t size,
} // namespace haswell
} // namespace simdutf
/* end file src/generic/utf8_to_utf16/valid_utf8_to_utf16.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf16/utf8_to_utf16.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf16/utf8_to_utf16.h
/* begin file src/generic/utf8_to_utf16/utf8_to_utf16.h */
@@ -22206,7 +22206,7 @@ using namespace simd;
} // namespace simdutf
/* end file src/generic/utf8_to_utf16/utf8_to_utf16.h */
// transcoding from UTF-8 to UTF-32
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf32/valid_utf8_to_utf32.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf32/valid_utf8_to_utf32.h
/* begin file src/generic/utf8_to_utf32/valid_utf8_to_utf32.h */
namespace simdutf {
@@ -22252,7 +22252,7 @@ simdutf_warn_unused size_t convert_valid(const char* input, size_t size,
} // namespace haswell
} // namespace simdutf
/* end file src/generic/utf8_to_utf32/valid_utf8_to_utf32.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf32/utf8_to_utf32.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf32/utf8_to_utf32.h
/* begin file src/generic/utf8_to_utf32/utf8_to_utf32.h */
@@ -22553,7 +22553,7 @@ using namespace simd;
} // namespace simdutf
/* end file src/generic/utf8_to_utf32/utf8_to_utf32.h */
// other functions
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8.h
/* begin file src/generic/utf8.h */
namespace simdutf {
@@ -22600,7 +22600,7 @@ simdutf_really_inline size_t utf32_length_from_utf8(const char* in, size_t size)
} // namespace haswell
} // namespace simdutf
/* end file src/generic/utf8.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf16.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf16.h
/* begin file src/generic/utf16.h */
namespace simdutf {
namespace haswell {
@@ -23128,7 +23128,7 @@ simdutf_warn_unused size_t implementation::utf32_length_from_utf8(const char * i
} // namespace haswell
} // namespace simdutf
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/haswell/end.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/haswell/end.h
/* begin file src/simdutf/haswell/end.h */
#if SIMDUTF_CAN_ALWAYS_RUN_HASWELL
// nothing needed.
@@ -23144,14 +23144,14 @@ SIMDUTF_POP_DISABLE_WARNINGS
/* end file src/haswell/implementation.cpp */
#endif
#if SIMDUTF_IMPLEMENTATION_PPC64
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=ppc64/implementation.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=ppc64/implementation.cpp
/* begin file src/ppc64/implementation.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/ppc64/begin.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/ppc64/begin.h
/* begin file src/simdutf/ppc64/begin.h */
// redefining SIMDUTF_IMPLEMENTATION to "ppc64"
// #define SIMDUTF_IMPLEMENTATION ppc64
@@ -23189,7 +23189,7 @@ simdutf_really_inline simd8 must_be_2_3_continuation(const simd8
} // namespace ppc64
} // namespace simdutf
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/buf_block_reader.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/buf_block_reader.h
/* begin file src/generic/buf_block_reader.h */
namespace simdutf {
namespace ppc64 {
@@ -23284,7 +23284,7 @@ simdutf_really_inline void buf_block_reader::advance() {
} // namespace ppc64
} // namespace simdutf
/* end file src/generic/buf_block_reader.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_validation/utf8_lookup4_algorithm.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_validation/utf8_lookup4_algorithm.h
/* begin file src/generic/utf8_validation/utf8_lookup4_algorithm.h */
namespace simdutf {
namespace ppc64 {
@@ -23473,7 +23473,7 @@ using utf8_validation::utf8_checker;
} // namespace ppc64
} // namespace simdutf
/* end file src/generic/utf8_validation/utf8_lookup4_algorithm.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_validation/utf8_validator.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_validation/utf8_validator.h
/* begin file src/generic/utf8_validation/utf8_validator.h */
namespace simdutf {
namespace ppc64 {
@@ -23600,7 +23600,7 @@ result generic_validate_ascii_with_errors(const char * input, size_t length) {
} // namespace simdutf
/* end file src/generic/utf8_validation/utf8_validator.h */
// transcoding from UTF-8 to UTF-16
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf16/valid_utf8_to_utf16.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf16/valid_utf8_to_utf16.h
/* begin file src/generic/utf8_to_utf16/valid_utf8_to_utf16.h */
@@ -23675,7 +23675,7 @@ simdutf_warn_unused size_t convert_valid(const char* input, size_t size,
} // namespace ppc64
} // namespace simdutf
/* end file src/generic/utf8_to_utf16/valid_utf8_to_utf16.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf16/utf8_to_utf16.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf16/utf8_to_utf16.h
/* begin file src/generic/utf8_to_utf16/utf8_to_utf16.h */
@@ -23983,7 +23983,7 @@ using namespace simd;
} // namespace simdutf
/* end file src/generic/utf8_to_utf16/utf8_to_utf16.h */
// transcoding from UTF-8 to UTF-32
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf32/valid_utf8_to_utf32.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf32/valid_utf8_to_utf32.h
/* begin file src/generic/utf8_to_utf32/valid_utf8_to_utf32.h */
namespace simdutf {
@@ -24029,7 +24029,7 @@ simdutf_warn_unused size_t convert_valid(const char* input, size_t size,
} // namespace ppc64
} // namespace simdutf
/* end file src/generic/utf8_to_utf32/valid_utf8_to_utf32.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf32/utf8_to_utf32.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf32/utf8_to_utf32.h
/* begin file src/generic/utf8_to_utf32/utf8_to_utf32.h */
@@ -24330,7 +24330,7 @@ using namespace simd;
} // namespace simdutf
/* end file src/generic/utf8_to_utf32/utf8_to_utf32.h */
// other functions
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8.h
/* begin file src/generic/utf8.h */
namespace simdutf {
@@ -24377,7 +24377,7 @@ simdutf_really_inline size_t utf32_length_from_utf8(const char* in, size_t size)
} // namespace ppc64
} // namespace simdutf
/* end file src/generic/utf8.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf16.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf16.h
/* begin file src/generic/utf16.h */
namespace simdutf {
namespace ppc64 {
@@ -24676,15 +24676,15 @@ simdutf_warn_unused size_t implementation::utf32_length_from_utf8(const char * i
} // namespace ppc64
} // namespace simdutf
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/ppc64/end.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/ppc64/end.h
/* begin file src/simdutf/ppc64/end.h */
/* end file src/simdutf/ppc64/end.h */
/* end file src/ppc64/implementation.cpp */
#endif
#if SIMDUTF_IMPLEMENTATION_WESTMERE
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=westmere/implementation.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=westmere/implementation.cpp
/* begin file src/westmere/implementation.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/westmere/begin.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/westmere/begin.h
/* begin file src/simdutf/westmere/begin.h */
// redefining SIMDUTF_IMPLEMENTATION to "westmere"
// #define SIMDUTF_IMPLEMENTATION westmere
@@ -24722,7 +24722,7 @@ simdutf_really_inline simd8 must_be_2_3_continuation(const simd8
return simd8(is_third_byte | is_fourth_byte) > int8_t(0);
}
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=westmere/sse_detect_encodings.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=westmere/sse_detect_encodings.cpp
/* begin file src/westmere/sse_detect_encodings.cpp */
template
// len is known to be a multiple of 2 when this is called
@@ -24932,7 +24932,7 @@ int sse_detect_encodings(const char * buf, size_t len) {
}
/* end file src/westmere/sse_detect_encodings.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=westmere/sse_validate_utf16.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=westmere/sse_validate_utf16.cpp
/* begin file src/westmere/sse_validate_utf16.cpp */
/*
In UTF-16 words in range 0xD800 to 0xDFFF have special meaning.
@@ -25132,7 +25132,7 @@ const result sse_validate_utf16_with_errors(const char16_t* input, size_t size)
return result(error_code::SUCCESS, input - start);
}
/* end file src/westmere/sse_validate_utf16.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=westmere/sse_validate_utf32le.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=westmere/sse_validate_utf32le.cpp
/* begin file src/westmere/sse_validate_utf32le.cpp */
/* Returns:
- pointer to the last unprocessed character (a scalar fallback should check the rest);
@@ -25198,7 +25198,7 @@ const result sse_validate_utf32le_with_errors(const char32_t* input, size_t size
}
/* end file src/westmere/sse_validate_utf32le.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=westmere/sse_convert_utf8_to_utf16.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=westmere/sse_convert_utf8_to_utf16.cpp
/* begin file src/westmere/sse_convert_utf8_to_utf16.cpp */
// depends on "tables/utf8_to_utf16_tables.h"
@@ -25383,7 +25383,7 @@ size_t convert_masked_utf8_to_utf16(const char *input,
return consumed;
}
/* end file src/westmere/sse_convert_utf8_to_utf16.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=westmere/sse_convert_utf8_to_utf32.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=westmere/sse_convert_utf8_to_utf32.cpp
/* begin file src/westmere/sse_convert_utf8_to_utf32.cpp */
// depends on "tables/utf8_to_utf16_tables.h"
@@ -25515,7 +25515,7 @@ size_t convert_masked_utf8_to_utf32(const char *input,
}
/* end file src/westmere/sse_convert_utf8_to_utf32.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=westmere/sse_convert_utf16_to_utf8.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=westmere/sse_convert_utf16_to_utf8.cpp
/* begin file src/westmere/sse_convert_utf16_to_utf8.cpp */
/*
The vectorized algorithm works on single SSE register i.e., it
@@ -26043,7 +26043,7 @@ std::pair sse_convert_utf16_to_utf8_with_errors(const char16_t* b
return std::make_pair(result(error_code::SUCCESS, buf - start), utf8_output);
}
/* end file src/westmere/sse_convert_utf16_to_utf8.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=westmere/sse_convert_utf16_to_utf32.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=westmere/sse_convert_utf16_to_utf32.cpp
/* begin file src/westmere/sse_convert_utf16_to_utf32.cpp */
/*
The vectorized algorithm works on single SSE register i.e., it
@@ -26227,7 +26227,7 @@ std::pair sse_convert_utf16_to_utf32_with_errors(const char16
}
/* end file src/westmere/sse_convert_utf16_to_utf32.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=westmere/sse_convert_utf32_to_utf8.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=westmere/sse_convert_utf32_to_utf8.cpp
/* begin file src/westmere/sse_convert_utf32_to_utf8.cpp */
std::pair sse_convert_utf32_to_utf8(const char32_t* buf, size_t len, char* utf8_output) {
const char32_t* end = buf + len;
@@ -26728,7 +26728,7 @@ std::pair sse_convert_utf32_to_utf8_with_errors(const char32_t* b
return std::make_pair(result(error_code::SUCCESS, buf - start), utf8_output);
}
/* end file src/westmere/sse_convert_utf32_to_utf8.cpp */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=westmere/sse_convert_utf32_to_utf16.cpp
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=westmere/sse_convert_utf32_to_utf16.cpp
/* begin file src/westmere/sse_convert_utf32_to_utf16.cpp */
template
std::pair sse_convert_utf32_to_utf16(const char32_t* buf, size_t len, char16_t* utf16_output) {
@@ -26867,7 +26867,7 @@ std::pair sse_convert_utf32_to_utf16_with_errors(const char32
} // namespace westmere
} // namespace simdutf
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/buf_block_reader.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/buf_block_reader.h
/* begin file src/generic/buf_block_reader.h */
namespace simdutf {
namespace westmere {
@@ -26962,7 +26962,7 @@ simdutf_really_inline void buf_block_reader::advance() {
} // namespace westmere
} // namespace simdutf
/* end file src/generic/buf_block_reader.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_validation/utf8_lookup4_algorithm.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_validation/utf8_lookup4_algorithm.h
/* begin file src/generic/utf8_validation/utf8_lookup4_algorithm.h */
namespace simdutf {
namespace westmere {
@@ -27151,7 +27151,7 @@ using utf8_validation::utf8_checker;
} // namespace westmere
} // namespace simdutf
/* end file src/generic/utf8_validation/utf8_lookup4_algorithm.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_validation/utf8_validator.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_validation/utf8_validator.h
/* begin file src/generic/utf8_validation/utf8_validator.h */
namespace simdutf {
namespace westmere {
@@ -27278,7 +27278,7 @@ result generic_validate_ascii_with_errors(const char * input, size_t length) {
} // namespace simdutf
/* end file src/generic/utf8_validation/utf8_validator.h */
// transcoding from UTF-8 to UTF-16
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf16/valid_utf8_to_utf16.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf16/valid_utf8_to_utf16.h
/* begin file src/generic/utf8_to_utf16/valid_utf8_to_utf16.h */
@@ -27353,7 +27353,7 @@ simdutf_warn_unused size_t convert_valid(const char* input, size_t size,
} // namespace westmere
} // namespace simdutf
/* end file src/generic/utf8_to_utf16/valid_utf8_to_utf16.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf16/utf8_to_utf16.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf16/utf8_to_utf16.h
/* begin file src/generic/utf8_to_utf16/utf8_to_utf16.h */
@@ -27661,7 +27661,7 @@ using namespace simd;
} // namespace simdutf
/* end file src/generic/utf8_to_utf16/utf8_to_utf16.h */
// transcoding from UTF-8 to UTF-32
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf32/valid_utf8_to_utf32.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf32/valid_utf8_to_utf32.h
/* begin file src/generic/utf8_to_utf32/valid_utf8_to_utf32.h */
namespace simdutf {
@@ -27707,7 +27707,7 @@ simdutf_warn_unused size_t convert_valid(const char* input, size_t size,
} // namespace westmere
} // namespace simdutf
/* end file src/generic/utf8_to_utf32/valid_utf8_to_utf32.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf32/utf8_to_utf32.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8_to_utf32/utf8_to_utf32.h
/* begin file src/generic/utf8_to_utf32/utf8_to_utf32.h */
@@ -28008,7 +28008,7 @@ using namespace simd;
} // namespace simdutf
/* end file src/generic/utf8_to_utf32/utf8_to_utf32.h */
// other functions
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf8.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf8.h
/* begin file src/generic/utf8.h */
namespace simdutf {
@@ -28055,7 +28055,7 @@ simdutf_really_inline size_t utf32_length_from_utf8(const char* in, size_t size)
} // namespace westmere
} // namespace simdutf
/* end file src/generic/utf8.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=generic/utf16.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=generic/utf16.h
/* begin file src/generic/utf16.h */
namespace simdutf {
namespace westmere {
@@ -28587,7 +28587,7 @@ simdutf_warn_unused size_t implementation::utf32_length_from_utf8(const char * i
} // namespace westmere
} // namespace simdutf
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/src, filename=simdutf/westmere/end.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/src, filename=simdutf/westmere/end.h
/* begin file src/simdutf/westmere/end.h */
#if SIMDUTF_CAN_ALWAYS_RUN_WESTMERE
// nothing needed.
diff --git a/deps/simdutf/simdutf.h b/deps/simdutf/simdutf.h
index 56ab9ecd5256ce..f63c163c0f2b30 100644
--- a/deps/simdutf/simdutf.h
+++ b/deps/simdutf/simdutf.h
@@ -1,11 +1,11 @@
-/* auto-generated on 2023-05-22 21:46:48 -0400. Do not edit! */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/include, filename=simdutf.h
+/* auto-generated on 2023-06-05 08:58:28 -0400. Do not edit! */
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/include, filename=simdutf.h
/* begin file include/simdutf.h */
#ifndef SIMDUTF_H
#define SIMDUTF_H
#include
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/include, filename=simdutf/compiler_check.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/include, filename=simdutf/compiler_check.h
/* begin file include/simdutf/compiler_check.h */
#ifndef SIMDUTF_COMPILER_CHECK_H
#define SIMDUTF_COMPILER_CHECK_H
@@ -43,13 +43,13 @@
#endif // SIMDUTF_COMPILER_CHECK_H
/* end file include/simdutf/compiler_check.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/include, filename=simdutf/common_defs.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/include, filename=simdutf/common_defs.h
/* begin file include/simdutf/common_defs.h */
#ifndef SIMDUTF_COMMON_DEFS_H
#define SIMDUTF_COMMON_DEFS_H
#include
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/include, filename=simdutf/portability.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/include, filename=simdutf/portability.h
/* begin file include/simdutf/portability.h */
#ifndef SIMDUTF_PORTABILITY_H
#define SIMDUTF_PORTABILITY_H
@@ -280,7 +280,7 @@ use a 64-bit target such as x64, 64-bit ARM or 64-bit PPC.")
#endif // SIMDUTF_PORTABILITY_H
/* end file include/simdutf/portability.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/include, filename=simdutf/avx512.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/include, filename=simdutf/avx512.h
/* begin file include/simdutf/avx512.h */
#ifndef SIMDUTF_AVX512_H_
#define SIMDUTF_AVX512_H_
@@ -460,19 +460,21 @@ use a 64-bit target such as x64, 64-bit ARM or 64-bit PPC.")
#endif // MSC_VER
-#if defined(SIMDUTF_VISUAL_STUDIO)
- /**
- * It does not matter here whether you are using
- * the regular visual studio or clang under visual
- * studio.
- */
- #if SIMDUTF_USING_LIBRARY
- #define SIMDUTF_DLLIMPORTEXPORT __declspec(dllimport)
+#ifndef SIMDUTF_DLLIMPORTEXPORT
+ #if defined(SIMDUTF_VISUAL_STUDIO)
+ /**
+ * It does not matter here whether you are using
+ * the regular visual studio or clang under visual
+ * studio.
+ */
+ #if SIMDUTF_USING_LIBRARY
+ #define SIMDUTF_DLLIMPORTEXPORT __declspec(dllimport)
+ #else
+ #define SIMDUTF_DLLIMPORTEXPORT __declspec(dllexport)
+ #endif
#else
- #define SIMDUTF_DLLIMPORTEXPORT __declspec(dllexport)
+ #define SIMDUTF_DLLIMPORTEXPORT
#endif
-#else
- #define SIMDUTF_DLLIMPORTEXPORT
#endif
/// If EXPR is an error, returns it.
@@ -481,7 +483,7 @@ use a 64-bit target such as x64, 64-bit ARM or 64-bit PPC.")
#endif // SIMDUTF_COMMON_DEFS_H
/* end file include/simdutf/common_defs.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/include, filename=simdutf/encoding_types.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/include, filename=simdutf/encoding_types.h
/* begin file include/simdutf/encoding_types.h */
#include
@@ -529,7 +531,7 @@ size_t bom_byte_size(encoding_type bom);
} // BOM namespace
} // simdutf namespace
/* end file include/simdutf/encoding_types.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/include, filename=simdutf/error.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/include, filename=simdutf/error.h
/* begin file include/simdutf/error.h */
#ifndef ERROR_H
#define ERROR_H
@@ -566,7 +568,7 @@ SIMDUTF_PUSH_DISABLE_WARNINGS
SIMDUTF_DISABLE_UNDESIRED_WARNINGS
// Public API
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/include, filename=simdutf/simdutf_version.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/include, filename=simdutf/simdutf_version.h
/* begin file include/simdutf/simdutf_version.h */
// /include/simdutf/simdutf_version.h automatically generated by release.py,
// do not change by hand
@@ -574,7 +576,7 @@ SIMDUTF_DISABLE_UNDESIRED_WARNINGS
#define SIMDUTF_SIMDUTF_VERSION_H
/** The version of simdutf being used (major.minor.revision) */
-#define SIMDUTF_VERSION "3.2.12"
+#define SIMDUTF_VERSION "3.2.14"
namespace simdutf {
enum {
@@ -589,13 +591,13 @@ enum {
/**
* The revision (major.minor.REVISION) of simdutf being used.
*/
- SIMDUTF_VERSION_REVISION = 12
+ SIMDUTF_VERSION_REVISION = 14
};
} // namespace simdutf
#endif // SIMDUTF_SIMDUTF_VERSION_H
/* end file include/simdutf/simdutf_version.h */
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/include, filename=simdutf/implementation.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/include, filename=simdutf/implementation.h
/* begin file include/simdutf/implementation.h */
#ifndef SIMDUTF_IMPLEMENTATION_H
#define SIMDUTF_IMPLEMENTATION_H
@@ -605,7 +607,7 @@ enum {
#endif
#include
#include
-// dofile: invoked with prepath=/Users/dlemire/CVS/github/simdutf/include, filename=simdutf/internal/isadetection.h
+// dofile: invoked with prepath=/Users/lemire/CVS/github/simdutf/include, filename=simdutf/internal/isadetection.h
/* begin file include/simdutf/internal/isadetection.h */
/* From
https://github.com/endorno/pytorch/blob/master/torch/lib/TH/generic/simd/simd.h
diff --git a/deps/uv/uv.gyp b/deps/uv/uv.gyp
index ddc30fafca8294..af6dccd8dedfa2 100644
--- a/deps/uv/uv.gyp
+++ b/deps/uv/uv.gyp
@@ -101,7 +101,6 @@
'include/uv/bsd.h',
'include/uv/aix.h',
'src/unix/async.c',
- 'src/unix/atomic-ops.h',
'src/unix/core.c',
'src/unix/dl.c',
'src/unix/fs.c',
@@ -115,7 +114,6 @@
'src/unix/process.c',
'src/unix/random-devurandom.c',
'src/unix/signal.c',
- 'src/unix/spinlock.h',
'src/unix/stream.c',
'src/unix/tcp.c',
'src/unix/thread.c',
diff --git a/deps/zlib/BUILD.gn b/deps/zlib/BUILD.gn
index 0ffd486d731d8d..9b3971041dffa0 100644
--- a/deps/zlib/BUILD.gn
+++ b/deps/zlib/BUILD.gn
@@ -359,7 +359,7 @@ component("zlib") {
if (is_android) {
import("//build/config/android/config.gni")
if (defined(android_ndk_root) && android_ndk_root != "") {
- deps += [ "//third_party/android_ndk:cpu_features" ]
+ deps += [ "//third_party/cpu_features:ndk_compat" ]
} else {
assert(false, "CPU detection requires the Android NDK")
}
diff --git a/deps/zlib/OWNERS b/deps/zlib/OWNERS
index ecffb59f051501..3a821578a748b3 100644
--- a/deps/zlib/OWNERS
+++ b/deps/zlib/OWNERS
@@ -1,5 +1,4 @@
agl@chromium.org
cavalcantii@chromium.org
cblume@chromium.org
-noel@chromium.org
scroggo@google.com
diff --git a/deps/zlib/adler32.c b/deps/zlib/adler32.c
index 8f8fbb904891ae..81c584f68e2331 100644
--- a/deps/zlib/adler32.c
+++ b/deps/zlib/adler32.c
@@ -7,8 +7,6 @@
#include "zutil.h"
-local uLong adler32_combine_ OF((uLong adler1, uLong adler2, z_off64_t len2));
-
#define BASE 65521U /* largest prime smaller than 65536 */
#define NMAX 5552
/* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */
@@ -65,11 +63,7 @@ local uLong adler32_combine_ OF((uLong adler1, uLong adler2, z_off64_t len2));
#endif
/* ========================================================================= */
-uLong ZEXPORT adler32_z(adler, buf, len)
- uLong adler;
- const Bytef *buf;
- z_size_t len;
-{
+uLong ZEXPORT adler32_z(uLong adler, const Bytef *buf, z_size_t len) {
unsigned long sum2;
unsigned n;
@@ -159,20 +153,12 @@ uLong ZEXPORT adler32_z(adler, buf, len)
}
/* ========================================================================= */
-uLong ZEXPORT adler32(adler, buf, len)
- uLong adler;
- const Bytef *buf;
- uInt len;
-{
+uLong ZEXPORT adler32(uLong adler, const Bytef *buf, uInt len) {
return adler32_z(adler, buf, len);
}
/* ========================================================================= */
-local uLong adler32_combine_(adler1, adler2, len2)
- uLong adler1;
- uLong adler2;
- z_off64_t len2;
-{
+local uLong adler32_combine_(uLong adler1, uLong adler2, z_off64_t len2) {
unsigned long sum1;
unsigned long sum2;
unsigned rem;
@@ -197,18 +183,10 @@ local uLong adler32_combine_(adler1, adler2, len2)
}
/* ========================================================================= */
-uLong ZEXPORT adler32_combine(adler1, adler2, len2)
- uLong adler1;
- uLong adler2;
- z_off_t len2;
-{
+uLong ZEXPORT adler32_combine(uLong adler1, uLong adler2, z_off_t len2) {
return adler32_combine_(adler1, adler2, len2);
}
-uLong ZEXPORT adler32_combine64(adler1, adler2, len2)
- uLong adler1;
- uLong adler2;
- z_off64_t len2;
-{
+uLong ZEXPORT adler32_combine64(uLong adler1, uLong adler2, z_off64_t len2) {
return adler32_combine_(adler1, adler2, len2);
}
diff --git a/deps/zlib/compress.c b/deps/zlib/compress.c
index 53f8d8d87f9b25..0f11a27c28a691 100644
--- a/deps/zlib/compress.c
+++ b/deps/zlib/compress.c
@@ -19,13 +19,8 @@
memory, Z_BUF_ERROR if there was not enough room in the output buffer,
Z_STREAM_ERROR if the level parameter is invalid.
*/
-int ZEXPORT compress2(dest, destLen, source, sourceLen, level)
- Bytef *dest;
- uLongf *destLen;
- const Bytef *source;
- uLong sourceLen;
- int level;
-{
+int ZEXPORT compress2(Bytef *dest, uLongf *destLen, const Bytef *source,
+ uLong sourceLen, int level) {
z_stream stream;
int err;
const uInt max = (uInt)-1;
@@ -65,12 +60,8 @@ int ZEXPORT compress2(dest, destLen, source, sourceLen, level)
/* ===========================================================================
*/
-int ZEXPORT compress(dest, destLen, source, sourceLen)
- Bytef *dest;
- uLongf *destLen;
- const Bytef *source;
- uLong sourceLen;
-{
+int ZEXPORT compress(Bytef *dest, uLongf *destLen, const Bytef *source,
+ uLong sourceLen) {
return compress2(dest, destLen, source, sourceLen, Z_DEFAULT_COMPRESSION);
}
@@ -78,9 +69,7 @@ int ZEXPORT compress(dest, destLen, source, sourceLen)
If the default memLevel or windowBits for deflateInit() is changed, then
this function needs to be updated.
*/
-uLong ZEXPORT compressBound(sourceLen)
- uLong sourceLen;
-{
+uLong ZEXPORT compressBound(uLong sourceLen) {
sourceLen = sourceLen + (sourceLen >> 12) + (sourceLen >> 14) +
(sourceLen >> 25) + 13;
/* FIXME(cavalcantii): usage of CRC32 Castagnoli as a hash function
diff --git a/deps/zlib/contrib/bench/OWNERS b/deps/zlib/contrib/bench/OWNERS
index f1e3d4f472e484..e69de29bb2d1d6 100644
--- a/deps/zlib/contrib/bench/OWNERS
+++ b/deps/zlib/contrib/bench/OWNERS
@@ -1 +0,0 @@
-noel@chromium.org
diff --git a/deps/zlib/contrib/optimizations/inffast_chunk.c b/deps/zlib/contrib/optimizations/inffast_chunk.c
index a38e14db037496..b1db452c2e7232 100644
--- a/deps/zlib/contrib/optimizations/inffast_chunk.c
+++ b/deps/zlib/contrib/optimizations/inffast_chunk.c
@@ -72,10 +72,7 @@
requires strm->avail_out >= 260 for each loop to avoid checking for
available output space while decoding.
*/
-void ZLIB_INTERNAL inflate_fast_chunk_(strm, start)
-z_streamp strm;
-unsigned start; /* inflate()'s starting value for strm->avail_out */
-{
+void ZLIB_INTERNAL inflate_fast_chunk_(z_streamp strm, unsigned start) {
struct inflate_state FAR *state;
z_const unsigned char FAR *in; /* local strm->next_in */
z_const unsigned char FAR *last; /* have enough input while in < last */
diff --git a/deps/zlib/contrib/optimizations/inffast_chunk.h b/deps/zlib/contrib/optimizations/inffast_chunk.h
index cc861bd09d1240..e75ee5e915e136 100644
--- a/deps/zlib/contrib/optimizations/inffast_chunk.h
+++ b/deps/zlib/contrib/optimizations/inffast_chunk.h
@@ -39,4 +39,4 @@
#define INFLATE_FAST_MIN_OUTPUT 260
#endif
-void ZLIB_INTERNAL inflate_fast_chunk_ OF((z_streamp strm, unsigned start));
+void ZLIB_INTERNAL inflate_fast_chunk_(z_streamp strm, unsigned start);
diff --git a/deps/zlib/contrib/optimizations/inflate.c b/deps/zlib/contrib/optimizations/inflate.c
index 8c062a6f0f2554..6ed87160f5350d 100644
--- a/deps/zlib/contrib/optimizations/inflate.c
+++ b/deps/zlib/contrib/optimizations/inflate.c
@@ -92,20 +92,7 @@
# endif
#endif
-/* function prototypes */
-local int inflateStateCheck OF((z_streamp strm));
-local void fixedtables OF((struct inflate_state FAR *state));
-local int updatewindow OF((z_streamp strm, const unsigned char FAR *end,
- unsigned copy));
-#ifdef BUILDFIXED
- void makefixed OF((void));
-#endif
-local unsigned syncsearch OF((unsigned FAR *have, const unsigned char FAR *buf,
- unsigned len));
-
-local int inflateStateCheck(strm)
-z_streamp strm;
-{
+local int inflateStateCheck(z_streamp strm) {
struct inflate_state FAR *state;
if (strm == Z_NULL ||
strm->zalloc == (alloc_func)0 || strm->zfree == (free_func)0)
@@ -117,9 +104,7 @@ z_streamp strm;
return 0;
}
-int ZEXPORT inflateResetKeep(strm)
-z_streamp strm;
-{
+int ZEXPORT inflateResetKeep(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -143,9 +128,7 @@ z_streamp strm;
return Z_OK;
}
-int ZEXPORT inflateReset(strm)
-z_streamp strm;
-{
+int ZEXPORT inflateReset(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -156,10 +139,7 @@ z_streamp strm;
return inflateResetKeep(strm);
}
-int ZEXPORT inflateReset2(strm, windowBits)
-z_streamp strm;
-int windowBits;
-{
+int ZEXPORT inflateReset2(z_streamp strm, int windowBits) {
int wrap;
struct inflate_state FAR *state;
@@ -196,12 +176,8 @@ int windowBits;
return inflateReset(strm);
}
-int ZEXPORT inflateInit2_(strm, windowBits, version, stream_size)
-z_streamp strm;
-int windowBits;
-const char *version;
-int stream_size;
-{
+int ZEXPORT inflateInit2_(z_streamp strm, int windowBits,
+ const char *version, int stream_size) {
int ret;
struct inflate_state FAR *state;
@@ -241,19 +217,12 @@ int stream_size;
return ret;
}
-int ZEXPORT inflateInit_(strm, version, stream_size)
-z_streamp strm;
-const char *version;
-int stream_size;
-{
+int ZEXPORT inflateInit_(z_streamp strm, const char *version,
+ int stream_size) {
return inflateInit2_(strm, DEF_WBITS, version, stream_size);
}
-int ZEXPORT inflatePrime(strm, bits, value)
-z_streamp strm;
-int bits;
-int value;
-{
+int ZEXPORT inflatePrime(z_streamp strm, int bits, int value) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -282,9 +251,7 @@ int value;
used for threaded applications, since the rewriting of the tables and virgin
may not be thread-safe.
*/
-local void fixedtables(state)
-struct inflate_state FAR *state;
-{
+local void fixedtables(struct inflate_state FAR *state) {
#ifdef BUILDFIXED
static int virgin = 1;
static code *lenfix, *distfix;
@@ -346,7 +313,7 @@ struct inflate_state FAR *state;
a.out > inffixed.h
*/
-void makefixed()
+void makefixed(void)
{
unsigned low, size;
struct inflate_state state;
@@ -400,11 +367,7 @@ void makefixed()
output will fall in the output data, making match copies simpler and faster.
The advantage may be dependent on the size of the processor's data caches.
*/
-local int updatewindow(strm, end, copy)
-z_streamp strm;
-const Bytef *end;
-unsigned copy;
-{
+local int updatewindow(z_streamp strm, const Bytef *end, unsigned copy) {
struct inflate_state FAR *state;
unsigned dist;
@@ -636,10 +599,7 @@ unsigned copy;
will return Z_BUF_ERROR if it has not reached the end of the stream.
*/
-int ZEXPORT inflate(strm, flush)
-z_streamp strm;
-int flush;
-{
+int ZEXPORT inflate(z_streamp strm, int flush) {
struct inflate_state FAR *state;
z_const unsigned char FAR *next; /* next input */
unsigned char FAR *put; /* next output */
@@ -1338,9 +1298,7 @@ int flush;
return ret;
}
-int ZEXPORT inflateEnd(strm)
-z_streamp strm;
-{
+int ZEXPORT inflateEnd(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm))
return Z_STREAM_ERROR;
@@ -1352,11 +1310,8 @@ z_streamp strm;
return Z_OK;
}
-int ZEXPORT inflateGetDictionary(strm, dictionary, dictLength)
-z_streamp strm;
-Bytef *dictionary;
-uInt *dictLength;
-{
+int ZEXPORT inflateGetDictionary(z_streamp strm, Bytef *dictionary,
+ uInt *dictLength) {
struct inflate_state FAR *state;
/* check state */
@@ -1375,11 +1330,8 @@ uInt *dictLength;
return Z_OK;
}
-int ZEXPORT inflateSetDictionary(strm, dictionary, dictLength)
-z_streamp strm;
-const Bytef *dictionary;
-uInt dictLength;
-{
+int ZEXPORT inflateSetDictionary(z_streamp strm, const Bytef *dictionary,
+ uInt dictLength) {
struct inflate_state FAR *state;
unsigned long dictid;
int ret;
@@ -1410,10 +1362,7 @@ uInt dictLength;
return Z_OK;
}
-int ZEXPORT inflateGetHeader(strm, head)
-z_streamp strm;
-gz_headerp head;
-{
+int ZEXPORT inflateGetHeader(z_streamp strm, gz_headerp head) {
struct inflate_state FAR *state;
/* check state */
@@ -1438,11 +1387,8 @@ gz_headerp head;
called again with more data and the *have state. *have is initialized to
zero for the first call.
*/
-local unsigned syncsearch(have, buf, len)
-unsigned FAR *have;
-const unsigned char FAR *buf;
-unsigned len;
-{
+local unsigned syncsearch(unsigned FAR *have, const unsigned char FAR *buf,
+ unsigned len) {
unsigned got;
unsigned next;
@@ -1461,9 +1407,7 @@ unsigned len;
return next;
}
-int ZEXPORT inflateSync(strm)
-z_streamp strm;
-{
+int ZEXPORT inflateSync(z_streamp strm) {
unsigned len; /* number of bytes to look at or looked at */
int flags; /* temporary to save header status */
unsigned long in, out; /* temporary to save total_in and total_out */
@@ -1519,9 +1463,7 @@ z_streamp strm;
block. When decompressing, PPP checks that at the end of input packet,
inflate is waiting for these length bytes.
*/
-int ZEXPORT inflateSyncPoint(strm)
-z_streamp strm;
-{
+int ZEXPORT inflateSyncPoint(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -1529,10 +1471,7 @@ z_streamp strm;
return state->mode == STORED && state->bits == 0;
}
-int ZEXPORT inflateCopy(dest, source)
-z_streamp dest;
-z_streamp source;
-{
+int ZEXPORT inflateCopy(z_streamp dest, z_streamp source) {
struct inflate_state FAR *state;
struct inflate_state FAR *copy;
unsigned char FAR *window;
@@ -1576,10 +1515,7 @@ z_streamp source;
return Z_OK;
}
-int ZEXPORT inflateUndermine(strm, subvert)
-z_streamp strm;
-int subvert;
-{
+int ZEXPORT inflateUndermine(z_streamp strm, int subvert) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -1594,10 +1530,7 @@ int subvert;
#endif
}
-int ZEXPORT inflateValidate(strm, check)
-z_streamp strm;
-int check;
-{
+int ZEXPORT inflateValidate(z_streamp strm, int check) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -1609,9 +1542,7 @@ int check;
return Z_OK;
}
-long ZEXPORT inflateMark(strm)
-z_streamp strm;
-{
+long ZEXPORT inflateMark(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm))
@@ -1622,9 +1553,7 @@ z_streamp strm;
(state->mode == MATCH ? state->was - state->length : 0));
}
-unsigned long ZEXPORT inflateCodesUsed(strm)
-z_streamp strm;
-{
+unsigned long ZEXPORT inflateCodesUsed(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return (unsigned long)-1;
state = (struct inflate_state FAR *)strm->state;
diff --git a/deps/zlib/contrib/tests/fuzzers/OWNERS b/deps/zlib/contrib/tests/fuzzers/OWNERS
index 0ae52574020a3d..ff3256199006fd 100644
--- a/deps/zlib/contrib/tests/fuzzers/OWNERS
+++ b/deps/zlib/contrib/tests/fuzzers/OWNERS
@@ -1,3 +1,2 @@
cblume@chromium.org
hans@chromium.org
-noel@chromium.org
diff --git a/deps/zlib/contrib/tests/fuzzers/deflate_fuzzer.cc b/deps/zlib/contrib/tests/fuzzers/deflate_fuzzer.cc
index ad1a985c68334a..64892bc5539e3b 100644
--- a/deps/zlib/contrib/tests/fuzzers/deflate_fuzzer.cc
+++ b/deps/zlib/contrib/tests/fuzzers/deflate_fuzzer.cc
@@ -38,6 +38,9 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
int ret =
deflateInit2(&stream, level, Z_DEFLATED, windowBits, memLevel, strategy);
ASSERT(ret == Z_OK);
+
+ size_t deflate_bound = deflateBound(&stream, src.size());
+
std::vector compressed(src.size() * 2 + 1000);
stream.next_out = compressed.data();
stream.avail_out = compressed.size();
@@ -54,6 +57,9 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
compressed.resize(compressed.size() - stream.avail_out);
deflateEnd(&stream);
+ // Check that the bound was correct.
+ ASSERT(compressed.size() <= deflate_bound);
+
// Verify that the data decompresses correctly.
ret = inflateInit2(&stream, windowBits);
ASSERT(ret == Z_OK);
diff --git a/deps/zlib/contrib/tests/infcover.cc b/deps/zlib/contrib/tests/infcover.cc
index 2ab0a4bec28b1d..4c55d5d7d5f5e0 100644
--- a/deps/zlib/contrib/tests/infcover.cc
+++ b/deps/zlib/contrib/tests/infcover.cc
@@ -395,9 +395,7 @@ void cover_support(void)
mem_setup(&strm);
strm.avail_in = 0;
strm.next_in = Z_NULL;
- char versioncpy[] = ZLIB_VERSION;
- versioncpy[0] -= 1;
- ret = inflateInit_(&strm, versioncpy, (int)sizeof(z_stream));
+ ret = inflateInit_(&strm, "!", (int)sizeof(z_stream));
assert(ret == Z_VERSION_ERROR);
mem_done(&strm, "wrong version");
@@ -486,7 +484,8 @@ local unsigned pull(void *desc, unsigned char **buf)
local int push(void *desc, unsigned char *buf, unsigned len)
{
- buf += len;
+ (void)buf;
+ (void)len;
return desc != Z_NULL; /* force error if desc not null */
}
diff --git a/deps/zlib/crc32.c b/deps/zlib/crc32.c
index acb6972f4bdbc8..1e1a57519cbda6 100644
--- a/deps/zlib/crc32.c
+++ b/deps/zlib/crc32.c
@@ -110,19 +110,6 @@
# define ARMCRC32_CANONICAL_ZLIB
#endif
-/* Local functions. */
-local z_crc_t multmodp OF((z_crc_t a, z_crc_t b));
-local z_crc_t x2nmodp OF((z_off64_t n, unsigned k));
-
-#if defined(W) && (!defined(ARMCRC32_CANONICAL_ZLIB) || defined(DYNAMIC_CRC_TABLE))
- local z_word_t byte_swap OF((z_word_t word));
-#endif
-
-#if defined(W) && !defined(ARMCRC32_CANONICAL_ZLIB)
- local z_crc_t crc_word OF((z_word_t data));
- local z_word_t crc_word_big OF((z_word_t data));
-#endif
-
#if defined(W) && (!defined(ARMCRC32_CANONICAL_ZLIB) || defined(DYNAMIC_CRC_TABLE))
/*
Swap the bytes in a z_word_t to convert between little and big endian. Any
@@ -130,9 +117,7 @@ local z_crc_t x2nmodp OF((z_off64_t n, unsigned k));
instruction, if one is available. This assumes that word_t is either 32 bits
or 64 bits.
*/
-local z_word_t byte_swap(word)
- z_word_t word;
-{
+local z_word_t byte_swap(z_word_t word) {
# if W == 8
return
(word & 0xff00000000000000) >> 56 |
@@ -153,24 +138,77 @@ local z_word_t byte_swap(word)
}
#endif
+#ifdef DYNAMIC_CRC_TABLE
+/* =========================================================================
+ * Table of powers of x for combining CRC-32s, filled in by make_crc_table()
+ * below.
+ */
+ local z_crc_t FAR x2n_table[32];
+#else
+/* =========================================================================
+ * Tables for byte-wise and braided CRC-32 calculations, and a table of powers
+ * of x for combining CRC-32s, all made by make_crc_table().
+ */
+# include "crc32.h"
+#endif
+
/* CRC polynomial. */
#define POLY 0xedb88320 /* p(x) reflected, with x^32 implied */
-#ifdef DYNAMIC_CRC_TABLE
+/*
+ Return a(x) multiplied by b(x) modulo p(x), where p(x) is the CRC polynomial,
+ reflected. For speed, this requires that a not be zero.
+ */
+local z_crc_t multmodp(z_crc_t a, z_crc_t b) {
+ z_crc_t m, p;
+ m = (z_crc_t)1 << 31;
+ p = 0;
+ for (;;) {
+ if (a & m) {
+ p ^= b;
+ if ((a & (m - 1)) == 0)
+ break;
+ }
+ m >>= 1;
+ b = b & 1 ? (b >> 1) ^ POLY : b >> 1;
+ }
+ return p;
+}
+
+/*
+ Return x^(n * 2^k) modulo p(x). Requires that x2n_table[] has been
+ initialized.
+ */
+local z_crc_t x2nmodp(z_off64_t n, unsigned k) {
+ z_crc_t p;
+
+ p = (z_crc_t)1 << 31; /* x^0 == 1 */
+ while (n) {
+ if (n & 1)
+ p = multmodp(x2n_table[k & 31], p);
+ n >>= 1;
+ k++;
+ }
+ return p;
+}
+
+#ifdef DYNAMIC_CRC_TABLE
+/* =========================================================================
+ * Build the tables for byte-wise and braided CRC-32 calculations, and a table
+ * of powers of x for combining CRC-32s.
+ */
local z_crc_t FAR crc_table[256];
-local z_crc_t FAR x2n_table[32];
-local void make_crc_table OF((void));
#ifdef W
local z_word_t FAR crc_big_table[256];
local z_crc_t FAR crc_braid_table[W][256];
local z_word_t FAR crc_braid_big_table[W][256];
- local void braid OF((z_crc_t [][256], z_word_t [][256], int, int));
+ local void braid(z_crc_t [][256], z_word_t [][256], int, int);
#endif
#ifdef MAKECRCH
- local void write_table OF((FILE *, const z_crc_t FAR *, int));
- local void write_table32hi OF((FILE *, const z_word_t FAR *, int));
- local void write_table64 OF((FILE *, const z_word_t FAR *, int));
+ local void write_table(FILE *, const z_crc_t FAR *, int);
+ local void write_table32hi(FILE *, const z_word_t FAR *, int);
+ local void write_table64(FILE *, const z_word_t FAR *, int);
#endif /* MAKECRCH */
/*
@@ -183,7 +221,6 @@ local void make_crc_table OF((void));
/* Definition of once functionality. */
typedef struct once_s once_t;
-local void once OF((once_t *, void (*)(void)));
/* Check for the availability of atomics. */
#if defined(__STDC__) && __STDC_VERSION__ >= 201112L && \
@@ -203,10 +240,7 @@ struct once_s {
invoke once() at the same time. The state must be a once_t initialized with
ONCE_INIT.
*/
-local void once(state, init)
- once_t *state;
- void (*init)(void);
-{
+local void once(once_t *state, void (*init)(void)) {
if (!atomic_load(&state->done)) {
if (atomic_flag_test_and_set(&state->begun))
while (!atomic_load(&state->done))
@@ -229,10 +263,7 @@ struct once_s {
/* Test and set. Alas, not atomic, but tries to minimize the period of
vulnerability. */
-local int test_and_set OF((int volatile *));
-local int test_and_set(flag)
- int volatile *flag;
-{
+local int test_and_set(int volatile *flag) {
int was;
was = *flag;
@@ -241,10 +272,7 @@ local int test_and_set(flag)
}
/* Run the provided init() function once. This is not thread-safe. */
-local void once(state, init)
- once_t *state;
- void (*init)(void);
-{
+local void once(once_t *state, void (*init)(void)) {
if (!state->done) {
if (test_and_set(&state->begun))
while (!state->done)
@@ -285,7 +313,7 @@ local once_t made = ONCE_INIT;
information needed to generate CRCs on data a byte at a time for all
combinations of CRC register values and incoming bytes.
*/
-local void make_crc_table()
+local void make_crc_table(void)
{
unsigned i, j, n;
z_crc_t p;
@@ -453,11 +481,7 @@ local void make_crc_table()
Write the 32-bit values in table[0..k-1] to out, five per line in
hexadecimal separated by commas.
*/
-local void write_table(out, table, k)
- FILE *out;
- const z_crc_t FAR *table;
- int k;
-{
+local void write_table(FILE *out, const z_crc_t FAR *table, int k) {
int n;
for (n = 0; n < k; n++)
@@ -470,11 +494,7 @@ local void write_table(out, table, k)
Write the high 32-bits of each value in table[0..k-1] to out, five per line
in hexadecimal separated by commas.
*/
-local void write_table32hi(out, table, k)
-FILE *out;
-const z_word_t FAR *table;
-int k;
-{
+local void write_table32hi(FILE *out, const z_word_t FAR *table, int k) {
int n;
for (n = 0; n < k; n++)
@@ -490,11 +510,7 @@ int k;
bits. If not, then the type cast and format string can be adjusted
accordingly.
*/
-local void write_table64(out, table, k)
- FILE *out;
- const z_word_t FAR *table;
- int k;
-{
+local void write_table64(FILE *out, const z_word_t FAR *table, int k) {
int n;
for (n = 0; n < k; n++)
@@ -504,8 +520,7 @@ local void write_table64(out, table, k)
}
/* Actually do the deed. */
-int main()
-{
+int main(void) {
make_crc_table();
return 0;
}
@@ -517,12 +532,7 @@ int main()
Generate the little and big-endian braid tables for the given n and z_word_t
size w. Each array must have room for w blocks of 256 elements.
*/
-local void braid(ltl, big, n, w)
- z_crc_t ltl[][256];
- z_word_t big[][256];
- int n;
- int w;
-{
+local void braid(z_crc_t ltl[][256], z_word_t big[][256], int n, int w) {
int k;
z_crc_t i, p, q;
for (k = 0; k < w; k++) {
@@ -537,69 +547,13 @@ local void braid(ltl, big, n, w)
}
#endif
-#else /* !DYNAMIC_CRC_TABLE */
-/* ========================================================================
- * Tables for byte-wise and braided CRC-32 calculations, and a table of powers
- * of x for combining CRC-32s, all made by make_crc_table().
- */
-#include "crc32.h"
#endif /* DYNAMIC_CRC_TABLE */
-/* ========================================================================
- * Routines used for CRC calculation. Some are also required for the table
- * generation above.
- */
-
-/*
- Return a(x) multiplied by b(x) modulo p(x), where p(x) is the CRC polynomial,
- reflected. For speed, this requires that a not be zero.
- */
-local z_crc_t multmodp(a, b)
- z_crc_t a;
- z_crc_t b;
-{
- z_crc_t m, p;
-
- m = (z_crc_t)1 << 31;
- p = 0;
- for (;;) {
- if (a & m) {
- p ^= b;
- if ((a & (m - 1)) == 0)
- break;
- }
- m >>= 1;
- b = b & 1 ? (b >> 1) ^ POLY : b >> 1;
- }
- return p;
-}
-
-/*
- Return x^(n * 2^k) modulo p(x). Requires that x2n_table[] has been
- initialized.
- */
-local z_crc_t x2nmodp(n, k)
- z_off64_t n;
- unsigned k;
-{
- z_crc_t p;
-
- p = (z_crc_t)1 << 31; /* x^0 == 1 */
- while (n) {
- if (n & 1)
- p = multmodp(x2n_table[k & 31], p);
- n >>= 1;
- k++;
- }
- return p;
-}
-
/* =========================================================================
* This function can be used by asm versions of crc32(), and to force the
* generation of the CRC tables in a threaded application.
*/
-const z_crc_t FAR * ZEXPORT get_crc_table()
-{
+const z_crc_t FAR * ZEXPORT get_crc_table(void) {
#ifdef DYNAMIC_CRC_TABLE
once(&made, make_crc_table);
#endif /* DYNAMIC_CRC_TABLE */
@@ -625,11 +579,8 @@ const z_crc_t FAR * ZEXPORT get_crc_table()
#define Z_BATCH_ZEROS 0xa10d3d0c /* computed from Z_BATCH = 3990 */
#define Z_BATCH_MIN 800 /* fewest words in a final batch */
-unsigned long ZEXPORT crc32_z(crc, buf, len)
- unsigned long crc;
- const unsigned char FAR *buf;
- z_size_t len;
-{
+unsigned long ZEXPORT crc32_z(unsigned long crc, const unsigned char FAR *buf,
+ z_size_t len) {
z_crc_t val;
z_word_t crc1, crc2;
const z_word_t *word;
@@ -729,18 +680,14 @@ unsigned long ZEXPORT crc32_z(crc, buf, len)
least-significant byte of the word as the first byte of data, without any pre
or post conditioning. This is used to combine the CRCs of each braid.
*/
-local z_crc_t crc_word(data)
- z_word_t data;
-{
+local z_crc_t crc_word(z_word_t data) {
int k;
for (k = 0; k < W; k++)
data = (data >> 8) ^ crc_table[data & 0xff];
return (z_crc_t)data;
}
-local z_word_t crc_word_big(data)
- z_word_t data;
-{
+local z_word_t crc_word_big(z_word_t data) {
int k;
for (k = 0; k < W; k++)
data = (data << 8) ^
@@ -751,11 +698,8 @@ local z_word_t crc_word_big(data)
#endif
/* ========================================================================= */
-unsigned long ZEXPORT crc32_z(crc, buf, len)
- unsigned long crc;
- const unsigned char FAR *buf;
- z_size_t len;
-{
+unsigned long ZEXPORT crc32_z(unsigned long crc, const unsigned char FAR *buf,
+ z_size_t len) {
/*
* zlib convention is to call crc32(0, NULL, 0); before making
* calls to crc32(). So this is a good, early (and infrequent)
@@ -1136,11 +1080,8 @@ unsigned long ZEXPORT crc32_z(crc, buf, len)
#endif
/* ========================================================================= */
-unsigned long ZEXPORT crc32(crc, buf, len)
- unsigned long crc;
- const unsigned char FAR *buf;
- uInt len;
-{
+unsigned long ZEXPORT crc32(unsigned long crc, const unsigned char FAR *buf,
+ uInt len) {
/* Some bots compile with optimizations disabled, others will emulate
* ARM on x86 and other weird combinations.
*/
@@ -1180,11 +1121,7 @@ unsigned long ZEXPORT crc32(crc, buf, len)
}
/* ========================================================================= */
-uLong ZEXPORT crc32_combine64(crc1, crc2, len2)
- uLong crc1;
- uLong crc2;
- z_off64_t len2;
-{
+uLong ZEXPORT crc32_combine64(uLong crc1, uLong crc2, z_off64_t len2) {
#ifdef DYNAMIC_CRC_TABLE
once(&made, make_crc_table);
#endif /* DYNAMIC_CRC_TABLE */
@@ -1192,17 +1129,11 @@ uLong ZEXPORT crc32_combine64(crc1, crc2, len2)
}
/* ========================================================================= */
-uLong ZEXPORT crc32_combine(crc1, crc2, len2)
- uLong crc1;
- uLong crc2;
- z_off_t len2;
-{
+uLong ZEXPORT crc32_combine(uLong crc1, uLong crc2, z_off_t len2) {
return crc32_combine64(crc1, crc2, (z_off64_t)len2);
}
/* ========================================================================= */
-uLong ZEXPORT crc32_combine_gen64(len2)
- z_off64_t len2;
-{
+uLong ZEXPORT crc32_combine_gen64(z_off64_t len2) {
#ifdef DYNAMIC_CRC_TABLE
once(&made, make_crc_table);
#endif /* DYNAMIC_CRC_TABLE */
@@ -1210,18 +1141,12 @@ uLong ZEXPORT crc32_combine_gen64(len2)
}
/* ========================================================================= */
-uLong ZEXPORT crc32_combine_gen(len2)
- z_off_t len2;
-{
+uLong ZEXPORT crc32_combine_gen(z_off_t len2) {
return crc32_combine_gen64((z_off64_t)len2);
}
/* ========================================================================= */
-uLong ZEXPORT crc32_combine_op(crc1, crc2, op)
- uLong crc1;
- uLong crc2;
- uLong op;
-{
+uLong ZEXPORT crc32_combine_op(uLong crc1, uLong crc2, uLong op) {
return multmodp(op, crc1) ^ (crc2 & 0xffffffff);
}
diff --git a/deps/zlib/deflate.c b/deps/zlib/deflate.c
index 7dc589ccc940c6..1fa55e5d010618 100644
--- a/deps/zlib/deflate.c
+++ b/deps/zlib/deflate.c
@@ -73,9 +73,6 @@ const char deflate_copyright[] =
copyright string in the executable of your product.
*/
-/* ===========================================================================
- * Function prototypes.
- */
typedef enum {
need_more, /* block not completed, need more input or more output */
block_done, /* block flush performed */
@@ -83,29 +80,16 @@ typedef enum {
finish_done /* finish done, accept no more input or output */
} block_state;
-typedef block_state (*compress_func) OF((deflate_state *s, int flush));
+typedef block_state (*compress_func)(deflate_state *s, int flush);
/* Compression function. Returns the block state after the call. */
-local int deflateStateCheck OF((z_streamp strm));
-local void slide_hash OF((deflate_state *s));
-local void fill_window OF((deflate_state *s));
-local block_state deflate_stored OF((deflate_state *s, int flush));
-local block_state deflate_fast OF((deflate_state *s, int flush));
+local block_state deflate_stored(deflate_state *s, int flush);
+local block_state deflate_fast(deflate_state *s, int flush);
#ifndef FASTEST
-local block_state deflate_slow OF((deflate_state *s, int flush));
-#endif
-local block_state deflate_rle OF((deflate_state *s, int flush));
-local block_state deflate_huff OF((deflate_state *s, int flush));
-local void lm_init OF((deflate_state *s));
-local void putShortMSB OF((deflate_state *s, uInt b));
-local void flush_pending OF((z_streamp strm));
-local unsigned read_buf OF((z_streamp strm, Bytef *buf, unsigned size));
-local uInt longest_match OF((deflate_state *s, IPos cur_match));
-
-#ifdef ZLIB_DEBUG
-local void check_match OF((deflate_state *s, IPos start, IPos match,
- int length));
+local block_state deflate_slow(deflate_state *s, int flush);
#endif
+local block_state deflate_rle(deflate_state *s, int flush);
+local block_state deflate_huff(deflate_state *s, int flush);
/* From crc32.c */
extern void ZLIB_INTERNAL crc_reset(deflate_state *const s);
@@ -184,9 +168,7 @@ local const config configuration_table[10] = {
* bit values at the expense of memory usage). We slide even when level == 0 to
* keep the hash table consistent if we switch back to level > 0 later.
*/
-local void slide_hash(s)
- deflate_state *s;
-{
+local void slide_hash(deflate_state *s) {
#if defined(DEFLATE_SLIDE_HASH_SSE2) || defined(DEFLATE_SLIDE_HASH_NEON)
slide_hash_simd(s->head, s->prev, s->w_size, s->hash_size);
return;
@@ -215,30 +197,195 @@ local void slide_hash(s)
#endif
}
+/* ===========================================================================
+ * Read a new buffer from the current input stream, update the adler32
+ * and total number of bytes read. All deflate() input goes through
+ * this function so some applications may wish to modify it to avoid
+ * allocating a large strm->next_in buffer and copying from it.
+ * (See also flush_pending()).
+ */
+local unsigned read_buf(z_streamp strm, Bytef *buf, unsigned size) {
+ unsigned len = strm->avail_in;
+
+ if (len > size) len = size;
+ if (len == 0) return 0;
+
+ strm->avail_in -= len;
+
+ /* TODO(cavalcantii): verify if we can remove 'copy_with_crc', it is legacy
+ * of the Intel optimizations dating back to 2015.
+ */
+#ifdef GZIP
+ if (strm->state->wrap == 2)
+ copy_with_crc(strm, buf, len);
+ else
+#endif
+ {
+ zmemcpy(buf, strm->next_in, len);
+ if (strm->state->wrap == 1)
+ strm->adler = adler32(strm->adler, buf, len);
+ }
+ strm->next_in += len;
+ strm->total_in += len;
+
+ return len;
+}
+
+/* ===========================================================================
+ * Fill the window when the lookahead becomes insufficient.
+ * Updates strstart and lookahead.
+ *
+ * IN assertion: lookahead < MIN_LOOKAHEAD
+ * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
+ * At least one byte has been read, or avail_in == 0; reads are
+ * performed for at least two bytes (required for the zip translate_eol
+ * option -- not supported here).
+ */
+local void fill_window(deflate_state *s) {
+ unsigned n;
+ unsigned more; /* Amount of free space at the end of the window. */
+ uInt wsize = s->w_size;
+
+ Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead");
+
+ do {
+ more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);
+
+ /* Deal with !@#$% 64K limit: */
+ if (sizeof(int) <= 2) {
+ if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
+ more = wsize;
+
+ } else if (more == (unsigned)(-1)) {
+ /* Very unlikely, but possible on 16 bit machine if
+ * strstart == 0 && lookahead == 1 (input done a byte at time)
+ */
+ more--;
+ }
+ }
+
+ /* If the window is almost full and there is insufficient lookahead,
+ * move the upper half to the lower one to make room in the upper half.
+ */
+ if (s->strstart >= wsize + MAX_DIST(s)) {
+
+ zmemcpy(s->window, s->window + wsize, (unsigned)wsize - more);
+ s->match_start -= wsize;
+ s->strstart -= wsize; /* we now have strstart >= MAX_DIST */
+ s->block_start -= (long) wsize;
+ if (s->insert > s->strstart)
+ s->insert = s->strstart;
+ slide_hash(s);
+ more += wsize;
+ }
+ if (s->strm->avail_in == 0) break;
+
+ /* If there was no sliding:
+ * strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
+ * more == window_size - lookahead - strstart
+ * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
+ * => more >= window_size - 2*WSIZE + 2
+ * In the BIG_MEM or MMAP case (not yet supported),
+ * window_size == input_size + MIN_LOOKAHEAD &&
+ * strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
+ * Otherwise, window_size == 2*WSIZE so more >= 2.
+ * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
+ */
+ Assert(more >= 2, "more < 2");
+
+ n = read_buf(s->strm, s->window + s->strstart + s->lookahead, more);
+ s->lookahead += n;
+
+ /* Initialize the hash value now that we have some input: */
+ if (s->chromium_zlib_hash) {
+ /* chromium hash reads 4 bytes */
+ if (s->lookahead + s->insert > MIN_MATCH) {
+ uInt str = s->strstart - s->insert;
+ while (s->insert) {
+ insert_string(s, str);
+ str++;
+ s->insert--;
+ if (s->lookahead + s->insert <= MIN_MATCH)
+ break;
+ }
+ }
+ } else
+ /* Initialize the hash value now that we have some input: */
+ if (s->lookahead + s->insert >= MIN_MATCH) {
+ uInt str = s->strstart - s->insert;
+ s->ins_h = s->window[str];
+ UPDATE_HASH(s, s->ins_h, s->window[str + 1]);
+#if MIN_MATCH != 3
+ Call UPDATE_HASH() MIN_MATCH-3 more times
+#endif
+ while (s->insert) {
+ UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]);
+#ifndef FASTEST
+ s->prev[str & s->w_mask] = s->head[s->ins_h];
+#endif
+ s->head[s->ins_h] = (Pos)str;
+ str++;
+ s->insert--;
+ if (s->lookahead + s->insert < MIN_MATCH)
+ break;
+ }
+ }
+ /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
+ * but this is not important since only literal bytes will be emitted.
+ */
+
+ } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);
+
+ /* If the WIN_INIT bytes after the end of the current data have never been
+ * written, then zero those bytes in order to avoid memory check reports of
+ * the use of uninitialized (or uninitialised as Julian writes) bytes by
+ * the longest match routines. Update the high water mark for the next
+ * time through here. WIN_INIT is set to MAX_MATCH since the longest match
+ * routines allow scanning to strstart + MAX_MATCH, ignoring lookahead.
+ */
+ if (s->high_water < s->window_size) {
+ ulg curr = s->strstart + (ulg)(s->lookahead);
+ ulg init;
+
+ if (s->high_water < curr) {
+ /* Previous high water mark below current data -- zero WIN_INIT
+ * bytes or up to end of window, whichever is less.
+ */
+ init = s->window_size - curr;
+ if (init > WIN_INIT)
+ init = WIN_INIT;
+ zmemzero(s->window + curr, (unsigned)init);
+ s->high_water = curr + init;
+ }
+ else if (s->high_water < (ulg)curr + WIN_INIT) {
+ /* High water mark at or above current data, but below current data
+ * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up
+ * to end of window, whichever is less.
+ */
+ init = (ulg)curr + WIN_INIT - s->high_water;
+ if (init > s->window_size - s->high_water)
+ init = s->window_size - s->high_water;
+ zmemzero(s->window + s->high_water, (unsigned)init);
+ s->high_water += init;
+ }
+ }
+
+ Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD,
+ "not enough room for search");
+}
+
/* ========================================================================= */
-int ZEXPORT deflateInit_(strm, level, version, stream_size)
- z_streamp strm;
- int level;
- const char *version;
- int stream_size;
-{
+int ZEXPORT deflateInit_(z_streamp strm, int level, const char *version,
+ int stream_size) {
return deflateInit2_(strm, level, Z_DEFLATED, MAX_WBITS, DEF_MEM_LEVEL,
Z_DEFAULT_STRATEGY, version, stream_size);
/* To do: ignore strm->next_in if we use it as window */
}
/* ========================================================================= */
-int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
- version, stream_size)
- z_streamp strm;
- int level;
- int method;
- int windowBits;
- int memLevel;
- int strategy;
- const char *version;
- int stream_size;
-{
+int ZEXPORT deflateInit2_(z_streamp strm, int level, int method,
+ int windowBits, int memLevel, int strategy,
+ const char *version, int stream_size) {
unsigned window_padding = 8;
deflate_state *s;
int wrap = 1;
@@ -412,9 +559,7 @@ int ZEXPORT deflateInit2_(strm, level, method, windowBits, memLevel, strategy,
/* =========================================================================
* Check for a valid deflate stream state. Return 0 if ok, 1 if not.
*/
-local int deflateStateCheck(strm)
- z_streamp strm;
-{
+local int deflateStateCheck(z_streamp strm) {
deflate_state *s;
if (strm == Z_NULL ||
strm->zalloc == (alloc_func)0 || strm->zfree == (free_func)0)
@@ -435,11 +580,8 @@ local int deflateStateCheck(strm)
}
/* ========================================================================= */
-int ZEXPORT deflateSetDictionary(strm, dictionary, dictLength)
- z_streamp strm;
- const Bytef *dictionary;
- uInt dictLength;
-{
+int ZEXPORT deflateSetDictionary(z_streamp strm, const Bytef *dictionary,
+ uInt dictLength) {
deflate_state *s;
uInt str, n;
int wrap;
@@ -500,11 +642,8 @@ int ZEXPORT deflateSetDictionary(strm, dictionary, dictLength)
}
/* ========================================================================= */
-int ZEXPORT deflateGetDictionary(strm, dictionary, dictLength)
- z_streamp strm;
- Bytef *dictionary;
- uInt *dictLength;
-{
+int ZEXPORT deflateGetDictionary(z_streamp strm, Bytef *dictionary,
+ uInt *dictLength) {
deflate_state *s;
uInt len;
@@ -522,9 +661,7 @@ int ZEXPORT deflateGetDictionary(strm, dictionary, dictLength)
}
/* ========================================================================= */
-int ZEXPORT deflateResetKeep(strm)
- z_streamp strm;
-{
+int ZEXPORT deflateResetKeep(z_streamp strm) {
deflate_state *s;
if (deflateStateCheck(strm)) {
@@ -559,10 +696,32 @@ int ZEXPORT deflateResetKeep(strm)
return Z_OK;
}
+/* ===========================================================================
+ * Initialize the "longest match" routines for a new zlib stream
+ */
+local void lm_init(deflate_state *s) {
+ s->window_size = (ulg)2L*s->w_size;
+
+ CLEAR_HASH(s);
+
+ /* Set the default configuration parameters:
+ */
+ s->max_lazy_match = configuration_table[s->level].max_lazy;
+ s->good_match = configuration_table[s->level].good_length;
+ s->nice_match = configuration_table[s->level].nice_length;
+ s->max_chain_length = configuration_table[s->level].max_chain;
+
+ s->strstart = 0;
+ s->block_start = 0L;
+ s->lookahead = 0;
+ s->insert = 0;
+ s->match_length = s->prev_length = MIN_MATCH-1;
+ s->match_available = 0;
+ s->ins_h = 0;
+}
+
/* ========================================================================= */
-int ZEXPORT deflateReset(strm)
- z_streamp strm;
-{
+int ZEXPORT deflateReset(z_streamp strm) {
int ret;
ret = deflateResetKeep(strm);
@@ -572,10 +731,7 @@ int ZEXPORT deflateReset(strm)
}
/* ========================================================================= */
-int ZEXPORT deflateSetHeader(strm, head)
- z_streamp strm;
- gz_headerp head;
-{
+int ZEXPORT deflateSetHeader(z_streamp strm, gz_headerp head) {
if (deflateStateCheck(strm) || strm->state->wrap != 2)
return Z_STREAM_ERROR;
strm->state->gzhead = head;
@@ -583,11 +739,7 @@ int ZEXPORT deflateSetHeader(strm, head)
}
/* ========================================================================= */
-int ZEXPORT deflatePending(strm, pending, bits)
- unsigned *pending;
- int *bits;
- z_streamp strm;
-{
+int ZEXPORT deflatePending(z_streamp strm, unsigned *pending, int *bits) {
if (deflateStateCheck(strm)) return Z_STREAM_ERROR;
if (pending != Z_NULL)
*pending = strm->state->pending;
@@ -597,11 +749,7 @@ int ZEXPORT deflatePending(strm, pending, bits)
}
/* ========================================================================= */
-int ZEXPORT deflatePrime(strm, bits, value)
- z_streamp strm;
- int bits;
- int value;
-{
+int ZEXPORT deflatePrime(z_streamp strm, int bits, int value) {
deflate_state *s;
int put;
@@ -624,11 +772,7 @@ int ZEXPORT deflatePrime(strm, bits, value)
}
/* ========================================================================= */
-int ZEXPORT deflateParams(strm, level, strategy)
- z_streamp strm;
- int level;
- int strategy;
-{
+int ZEXPORT deflateParams(z_streamp strm, int level, int strategy) {
deflate_state *s;
compress_func func;
@@ -673,13 +817,8 @@ int ZEXPORT deflateParams(strm, level, strategy)
}
/* ========================================================================= */
-int ZEXPORT deflateTune(strm, good_length, max_lazy, nice_length, max_chain)
- z_streamp strm;
- int good_length;
- int max_lazy;
- int nice_length;
- int max_chain;
-{
+int ZEXPORT deflateTune(z_streamp strm, int good_length, int max_lazy,
+ int nice_length, int max_chain) {
deflate_state *s;
if (deflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -715,10 +854,7 @@ int ZEXPORT deflateTune(strm, good_length, max_lazy, nice_length, max_chain)
*
* Shifts are used to approximate divisions, for speed.
*/
-uLong ZEXPORT deflateBound(strm, sourceLen)
- z_streamp strm;
- uLong sourceLen;
-{
+uLong ZEXPORT deflateBound(z_streamp strm, uLong sourceLen) {
deflate_state *s;
uLong fixedlen, storelen, wraplen;
@@ -788,10 +924,7 @@ uLong ZEXPORT deflateBound(strm, sourceLen)
* IN assertion: the stream state is correct and there is enough room in
* pending_buf.
*/
-local void putShortMSB(s, b)
- deflate_state *s;
- uInt b;
-{
+local void putShortMSB(deflate_state *s, uInt b) {
put_byte(s, (Byte)(b >> 8));
put_byte(s, (Byte)(b & 0xff));
}
@@ -802,9 +935,7 @@ local void putShortMSB(s, b)
* applications may wish to modify it to avoid allocating a large
* strm->next_out buffer and copying into it. (See also read_buf()).
*/
-local void flush_pending(strm)
- z_streamp strm;
-{
+local void flush_pending(z_streamp strm) {
unsigned len;
deflate_state *s = strm->state;
@@ -835,10 +966,7 @@ local void flush_pending(strm)
} while (0)
/* ========================================================================= */
-int ZEXPORT deflate(strm, flush)
- z_streamp strm;
- int flush;
-{
+int ZEXPORT deflate(z_streamp strm, int flush) {
int old_flush; /* value of flush param for previous deflate call */
deflate_state *s;
@@ -1151,9 +1279,7 @@ int ZEXPORT deflate(strm, flush)
}
/* ========================================================================= */
-int ZEXPORT deflateEnd(strm)
- z_streamp strm;
-{
+int ZEXPORT deflateEnd(z_streamp strm) {
int status;
if (deflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -1177,11 +1303,10 @@ int ZEXPORT deflateEnd(strm)
* To simplify the source, this is not supported for 16-bit MSDOS (which
* doesn't have enough memory anyway to duplicate compression states).
*/
-int ZEXPORT deflateCopy(dest, source)
- z_streamp dest;
- z_streamp source;
-{
+int ZEXPORT deflateCopy(z_streamp dest, z_streamp source) {
#ifdef MAXSEG_64K
+ (void)dest;
+ (void)source;
return Z_STREAM_ERROR;
#else
deflate_state *ds;
@@ -1229,67 +1354,6 @@ int ZEXPORT deflateCopy(dest, source)
#endif /* MAXSEG_64K */
}
-/* ===========================================================================
- * Read a new buffer from the current input stream, update the adler32
- * and total number of bytes read. All deflate() input goes through
- * this function so some applications may wish to modify it to avoid
- * allocating a large strm->next_in buffer and copying from it.
- * (See also flush_pending()).
- */
-local unsigned read_buf(strm, buf, size)
- z_streamp strm;
- Bytef *buf;
- unsigned size;
-{
- unsigned len = strm->avail_in;
-
- if (len > size) len = size;
- if (len == 0) return 0;
-
- strm->avail_in -= len;
-
-#ifdef GZIP
- if (strm->state->wrap == 2)
- copy_with_crc(strm, buf, len);
- else
-#endif
- {
- zmemcpy(buf, strm->next_in, len);
- if (strm->state->wrap == 1)
- strm->adler = adler32(strm->adler, buf, len);
- }
- strm->next_in += len;
- strm->total_in += len;
-
- return len;
-}
-
-/* ===========================================================================
- * Initialize the "longest match" routines for a new zlib stream
- */
-local void lm_init(s)
- deflate_state *s;
-{
- s->window_size = (ulg)2L*s->w_size;
-
- CLEAR_HASH(s);
-
- /* Set the default configuration parameters:
- */
- s->max_lazy_match = configuration_table[s->level].max_lazy;
- s->good_match = configuration_table[s->level].good_length;
- s->nice_match = configuration_table[s->level].nice_length;
- s->max_chain_length = configuration_table[s->level].max_chain;
-
- s->strstart = 0;
- s->block_start = 0L;
- s->lookahead = 0;
- s->insert = 0;
- s->match_length = s->prev_length = MIN_MATCH-1;
- s->match_available = 0;
- s->ins_h = 0;
-}
-
#ifndef FASTEST
/* ===========================================================================
* Set match_start to the longest match starting at the given string and
@@ -1300,10 +1364,7 @@ local void lm_init(s)
* string (strstart) and its distance is <= MAX_DIST, and prev_length >= 1
* OUT assertion: the match length is not greater than s->lookahead.
*/
-local uInt longest_match(s, cur_match)
- deflate_state *s;
- IPos cur_match; /* current match */
-{
+local uInt longest_match(deflate_state *s, IPos cur_match) {
unsigned chain_length = s->max_chain_length;/* max hash chain length */
register Bytef *scan = s->window + s->strstart; /* current string */
register Bytef *match; /* matched string */
@@ -1469,10 +1530,7 @@ local uInt longest_match(s, cur_match)
/* ---------------------------------------------------------------------------
* Optimized version for FASTEST only
*/
-local uInt longest_match(s, cur_match)
- deflate_state *s;
- IPos cur_match; /* current match */
-{
+local uInt longest_match(deflate_state *s, IPos cur_match) {
register Bytef *scan = s->window + s->strstart; /* current string */
register Bytef *match; /* matched string */
register int len; /* length of current match */
@@ -1533,11 +1591,7 @@ local uInt longest_match(s, cur_match)
/* ===========================================================================
* Check that the match at match_start is indeed a match.
*/
-local void check_match(s, start, match, length)
- deflate_state *s;
- IPos start, match;
- int length;
-{
+local void check_match(deflate_state *s, IPos start, IPos match, int length) {
/* check that the match is indeed a match */
if (zmemcmp(s->window + match,
s->window + start, length) != EQUAL) {
@@ -1557,151 +1611,6 @@ local void check_match(s, start, match, length)
# define check_match(s, start, match, length)
#endif /* ZLIB_DEBUG */
-/* ===========================================================================
- * Fill the window when the lookahead becomes insufficient.
- * Updates strstart and lookahead.
- *
- * IN assertion: lookahead < MIN_LOOKAHEAD
- * OUT assertions: strstart <= window_size-MIN_LOOKAHEAD
- * At least one byte has been read, or avail_in == 0; reads are
- * performed for at least two bytes (required for the zip translate_eol
- * option -- not supported here).
- */
-local void fill_window(s)
- deflate_state *s;
-{
- unsigned n;
- unsigned more; /* Amount of free space at the end of the window. */
- uInt wsize = s->w_size;
-
- Assert(s->lookahead < MIN_LOOKAHEAD, "already enough lookahead");
-
- do {
- more = (unsigned)(s->window_size -(ulg)s->lookahead -(ulg)s->strstart);
-
- /* Deal with !@#$% 64K limit: */
- if (sizeof(int) <= 2) {
- if (more == 0 && s->strstart == 0 && s->lookahead == 0) {
- more = wsize;
-
- } else if (more == (unsigned)(-1)) {
- /* Very unlikely, but possible on 16 bit machine if
- * strstart == 0 && lookahead == 1 (input done a byte at time)
- */
- more--;
- }
- }
-
- /* If the window is almost full and there is insufficient lookahead,
- * move the upper half to the lower one to make room in the upper half.
- */
- if (s->strstart >= wsize + MAX_DIST(s)) {
-
- zmemcpy(s->window, s->window + wsize, (unsigned)wsize - more);
- s->match_start -= wsize;
- s->strstart -= wsize; /* we now have strstart >= MAX_DIST */
- s->block_start -= (long) wsize;
- if (s->insert > s->strstart)
- s->insert = s->strstart;
- slide_hash(s);
- more += wsize;
- }
- if (s->strm->avail_in == 0) break;
-
- /* If there was no sliding:
- * strstart <= WSIZE+MAX_DIST-1 && lookahead <= MIN_LOOKAHEAD - 1 &&
- * more == window_size - lookahead - strstart
- * => more >= window_size - (MIN_LOOKAHEAD-1 + WSIZE + MAX_DIST-1)
- * => more >= window_size - 2*WSIZE + 2
- * In the BIG_MEM or MMAP case (not yet supported),
- * window_size == input_size + MIN_LOOKAHEAD &&
- * strstart + s->lookahead <= input_size => more >= MIN_LOOKAHEAD.
- * Otherwise, window_size == 2*WSIZE so more >= 2.
- * If there was sliding, more >= WSIZE. So in all cases, more >= 2.
- */
- Assert(more >= 2, "more < 2");
-
- n = read_buf(s->strm, s->window + s->strstart + s->lookahead, more);
- s->lookahead += n;
-
- /* Initialize the hash value now that we have some input: */
- if (s->chromium_zlib_hash) {
- /* chromium hash reads 4 bytes */
- if (s->lookahead + s->insert > MIN_MATCH) {
- uInt str = s->strstart - s->insert;
- while (s->insert) {
- insert_string(s, str);
- str++;
- s->insert--;
- if (s->lookahead + s->insert <= MIN_MATCH)
- break;
- }
- }
- } else
- /* Initialize the hash value now that we have some input: */
- if (s->lookahead + s->insert >= MIN_MATCH) {
- uInt str = s->strstart - s->insert;
- s->ins_h = s->window[str];
- UPDATE_HASH(s, s->ins_h, s->window[str + 1]);
-#if MIN_MATCH != 3
- Call UPDATE_HASH() MIN_MATCH-3 more times
-#endif
- while (s->insert) {
- UPDATE_HASH(s, s->ins_h, s->window[str + MIN_MATCH-1]);
-#ifndef FASTEST
- s->prev[str & s->w_mask] = s->head[s->ins_h];
-#endif
- s->head[s->ins_h] = (Pos)str;
- str++;
- s->insert--;
- if (s->lookahead + s->insert < MIN_MATCH)
- break;
- }
- }
- /* If the whole input has less than MIN_MATCH bytes, ins_h is garbage,
- * but this is not important since only literal bytes will be emitted.
- */
-
- } while (s->lookahead < MIN_LOOKAHEAD && s->strm->avail_in != 0);
-
- /* If the WIN_INIT bytes after the end of the current data have never been
- * written, then zero those bytes in order to avoid memory check reports of
- * the use of uninitialized (or uninitialised as Julian writes) bytes by
- * the longest match routines. Update the high water mark for the next
- * time through here. WIN_INIT is set to MAX_MATCH since the longest match
- * routines allow scanning to strstart + MAX_MATCH, ignoring lookahead.
- */
- if (s->high_water < s->window_size) {
- ulg curr = s->strstart + (ulg)(s->lookahead);
- ulg init;
-
- if (s->high_water < curr) {
- /* Previous high water mark below current data -- zero WIN_INIT
- * bytes or up to end of window, whichever is less.
- */
- init = s->window_size - curr;
- if (init > WIN_INIT)
- init = WIN_INIT;
- zmemzero(s->window + curr, (unsigned)init);
- s->high_water = curr + init;
- }
- else if (s->high_water < (ulg)curr + WIN_INIT) {
- /* High water mark at or above current data, but below current data
- * plus WIN_INIT -- zero out to current data plus WIN_INIT, or up
- * to end of window, whichever is less.
- */
- init = (ulg)curr + WIN_INIT - s->high_water;
- if (init > s->window_size - s->high_water)
- init = s->window_size - s->high_water;
- zmemzero(s->window + s->high_water, (unsigned)init);
- s->high_water += init;
- }
- }
-
- Assert((ulg)s->strstart <= s->window_size - MIN_LOOKAHEAD,
- "not enough room for search");
-}
-
/* ===========================================================================
* Flush the current block, with given end-of-file flag.
* IN assertion: strstart is set to the end of the current match.
@@ -1744,10 +1653,7 @@ local void fill_window(s)
* copied. It is most efficient with large input and output buffers, which
* maximizes the opportunities to have a single copy from next_in to next_out.
*/
-local block_state deflate_stored(s, flush)
- deflate_state *s;
- int flush;
-{
+local block_state deflate_stored(deflate_state *s, int flush) {
/* Smallest worthy block size when not flushing or finishing. By default
* this is 32K. This can be as small as 507 bytes for memLevel == 1. For
* large input and output buffers, the stored block size will be larger.
@@ -1931,10 +1837,7 @@ local block_state deflate_stored(s, flush)
* new strings in the dictionary only for unmatched strings or for short
* matches. It is used only for the fast compression options.
*/
-local block_state deflate_fast(s, flush)
- deflate_state *s;
- int flush;
-{
+local block_state deflate_fast(deflate_state *s, int flush) {
IPos hash_head; /* head of the hash chain */
int bflush; /* set if current block must be flushed */
@@ -2036,10 +1939,7 @@ local block_state deflate_fast(s, flush)
* evaluation for matches: a match is finally adopted only if there is
* no better match at the next window position.
*/
-local block_state deflate_slow(s, flush)
- deflate_state *s;
- int flush;
-{
+local block_state deflate_slow(deflate_state *s, int flush) {
IPos hash_head; /* head of hash chain */
int bflush; /* set if current block must be flushed */
@@ -2173,10 +2073,7 @@ local block_state deflate_slow(s, flush)
* one. Do not maintain a hash table. (It will be regenerated if this run of
* deflate switches away from Z_RLE.)
*/
-local block_state deflate_rle(s, flush)
- deflate_state *s;
- int flush;
-{
+local block_state deflate_rle(deflate_state *s, int flush) {
int bflush; /* set if current block must be flushed */
uInt prev; /* byte at distance one to match */
Bytef *scan, *strend; /* scan goes up to strend for length of run */
@@ -2247,10 +2144,7 @@ local block_state deflate_rle(s, flush)
* For Z_HUFFMAN_ONLY, do not look for matches. Do not maintain a hash table.
* (It will be regenerated if this run of deflate switches away from Huffman.)
*/
-local block_state deflate_huff(s, flush)
- deflate_state *s;
- int flush;
-{
+local block_state deflate_huff(deflate_state *s, int flush) {
int bflush; /* set if current block must be flushed */
for (;;) {
diff --git a/deps/zlib/deflate.h b/deps/zlib/deflate.h
index ad3cef7e45da4f..f164191106922d 100644
--- a/deps/zlib/deflate.h
+++ b/deps/zlib/deflate.h
@@ -296,14 +296,14 @@ typedef struct internal_state {
memory checker errors from longest match routines */
/* in trees.c */
-void ZLIB_INTERNAL _tr_init OF((deflate_state *s));
-int ZLIB_INTERNAL _tr_tally OF((deflate_state *s, unsigned dist, unsigned lc));
-void ZLIB_INTERNAL _tr_flush_block OF((deflate_state *s, charf *buf,
- ulg stored_len, int last));
-void ZLIB_INTERNAL _tr_flush_bits OF((deflate_state *s));
-void ZLIB_INTERNAL _tr_align OF((deflate_state *s));
-void ZLIB_INTERNAL _tr_stored_block OF((deflate_state *s, charf *buf,
- ulg stored_len, int last));
+void ZLIB_INTERNAL _tr_init(deflate_state *s);
+int ZLIB_INTERNAL _tr_tally(deflate_state *s, unsigned dist, unsigned lc);
+void ZLIB_INTERNAL _tr_flush_block(deflate_state *s, charf *buf,
+ ulg stored_len, int last);
+void ZLIB_INTERNAL _tr_flush_bits(deflate_state *s);
+void ZLIB_INTERNAL _tr_align(deflate_state *s);
+void ZLIB_INTERNAL _tr_stored_block(deflate_state *s, charf *buf,
+ ulg stored_len, int last);
#define d_code(dist) \
((dist) < 256 ? _dist_code[dist] : _dist_code[256+((dist)>>7)])
diff --git a/deps/zlib/gzclose.c b/deps/zlib/gzclose.c
index caeb99a3177f47..48d6a86f04b6ea 100644
--- a/deps/zlib/gzclose.c
+++ b/deps/zlib/gzclose.c
@@ -8,9 +8,7 @@
/* gzclose() is in a separate file so that it is linked in only if it is used.
That way the other gzclose functions can be used instead to avoid linking in
unneeded compression or decompression routines. */
-int ZEXPORT gzclose(file)
- gzFile file;
-{
+int ZEXPORT gzclose(gzFile file) {
#ifndef NO_GZCOMPRESS
gz_statep state;
diff --git a/deps/zlib/gzguts.h b/deps/zlib/gzguts.h
index 57faf37165a354..e23f831f531bb1 100644
--- a/deps/zlib/gzguts.h
+++ b/deps/zlib/gzguts.h
@@ -119,8 +119,8 @@
/* gz* functions always use library allocation functions */
#ifndef STDC
- extern voidp malloc OF((uInt size));
- extern void free OF((voidpf ptr));
+ extern voidp malloc(uInt size);
+ extern void free(voidpf ptr);
#endif
/* get errno and strerror definition */
@@ -138,10 +138,10 @@
/* provide prototypes for these when building zlib without LFS */
#if !defined(_LARGEFILE64_SOURCE) || _LFS64_LARGEFILE-0 == 0
- ZEXTERN gzFile ZEXPORT gzopen64 OF((const char *, const char *));
- ZEXTERN z_off64_t ZEXPORT gzseek64 OF((gzFile, z_off64_t, int));
- ZEXTERN z_off64_t ZEXPORT gztell64 OF((gzFile));
- ZEXTERN z_off64_t ZEXPORT gzoffset64 OF((gzFile));
+ ZEXTERN gzFile ZEXPORT gzopen64(const char *, const char *);
+ ZEXTERN z_off64_t ZEXPORT gzseek64(gzFile, z_off64_t, int);
+ ZEXTERN z_off64_t ZEXPORT gztell64(gzFile);
+ ZEXTERN z_off64_t ZEXPORT gzoffset64(gzFile);
#endif
/* default memLevel */
@@ -203,9 +203,9 @@ typedef struct {
typedef gz_state FAR *gz_statep;
/* shared functions */
-void ZLIB_INTERNAL gz_error OF((gz_statep, int, const char *));
+void ZLIB_INTERNAL gz_error(gz_statep, int, const char *);
#if defined UNDER_CE
-char ZLIB_INTERNAL *gz_strwinerror OF((DWORD error));
+char ZLIB_INTERNAL *gz_strwinerror(DWORD error);
#endif
/* GT_OFF(x), where x is an unsigned value, is true if x > maximum z_off64_t
@@ -214,6 +214,6 @@ char ZLIB_INTERNAL *gz_strwinerror OF((DWORD error));
#ifdef INT_MAX
# define GT_OFF(x) (sizeof(int) == sizeof(z_off64_t) && (x) > INT_MAX)
#else
-unsigned ZLIB_INTERNAL gz_intmax OF((void));
+unsigned ZLIB_INTERNAL gz_intmax(void);
# define GT_OFF(x) (sizeof(int) == sizeof(z_off64_t) && (x) > gz_intmax())
#endif
diff --git a/deps/zlib/gzlib.c b/deps/zlib/gzlib.c
index bbdb797e8079d8..9810e3553e0263 100644
--- a/deps/zlib/gzlib.c
+++ b/deps/zlib/gzlib.c
@@ -18,10 +18,6 @@
#endif
#endif
-/* Local functions */
-local void gz_reset OF((gz_statep));
-local gzFile gz_open OF((const void *, int, const char *));
-
#if defined UNDER_CE
/* Map the Windows error number in ERROR to a locale-dependent error message
@@ -33,9 +29,7 @@ local gzFile gz_open OF((const void *, int, const char *));
The gz_strwinerror function does not change the current setting of
GetLastError. */
-char ZLIB_INTERNAL *gz_strwinerror(error)
- DWORD error;
-{
+char ZLIB_INTERNAL *gz_strwinerror(DWORD error) {
static char buf[1024];
wchar_t *msgbuf;
@@ -75,9 +69,7 @@ char ZLIB_INTERNAL *gz_strwinerror(error)
#endif /* UNDER_CE */
/* Reset gzip file state */
-local void gz_reset(state)
- gz_statep state;
-{
+local void gz_reset(gz_statep state) {
state->x.have = 0; /* no output data available */
if (state->mode == GZ_READ) { /* for reading ... */
state->eof = 0; /* not at end of file */
@@ -93,11 +85,7 @@ local void gz_reset(state)
}
/* Open a gzip file either by name or file descriptor. */
-local gzFile gz_open(path, fd, mode)
- const void *path;
- int fd;
- const char *mode;
-{
+local gzFile gz_open(const void *path, int fd, const char *mode) {
gz_statep state;
z_size_t len;
int oflag;
@@ -272,26 +260,17 @@ local gzFile gz_open(path, fd, mode)
}
/* -- see zlib.h -- */
-gzFile ZEXPORT gzopen(path, mode)
- const char *path;
- const char *mode;
-{
+gzFile ZEXPORT gzopen(const char *path, const char *mode) {
return gz_open(path, -1, mode);
}
/* -- see zlib.h -- */
-gzFile ZEXPORT gzopen64(path, mode)
- const char *path;
- const char *mode;
-{
+gzFile ZEXPORT gzopen64(const char *path, const char *mode) {
return gz_open(path, -1, mode);
}
/* -- see zlib.h -- */
-gzFile ZEXPORT gzdopen(fd, mode)
- int fd;
- const char *mode;
-{
+gzFile ZEXPORT gzdopen(int fd, const char *mode) {
char *path; /* identifier for error messages */
gzFile gz;
@@ -309,19 +288,13 @@ gzFile ZEXPORT gzdopen(fd, mode)
/* -- see zlib.h -- */
#ifdef WIDECHAR
-gzFile ZEXPORT gzopen_w(path, mode)
- const wchar_t *path;
- const char *mode;
-{
+gzFile ZEXPORT gzopen_w(const wchar_t *path, const char *mode) {
return gz_open(path, -2, mode);
}
#endif
/* -- see zlib.h -- */
-int ZEXPORT gzbuffer(file, size)
- gzFile file;
- unsigned size;
-{
+int ZEXPORT gzbuffer(gzFile file, unsigned size) {
gz_statep state;
/* get internal structure and check integrity */
@@ -345,9 +318,7 @@ int ZEXPORT gzbuffer(file, size)
}
/* -- see zlib.h -- */
-int ZEXPORT gzrewind(file)
- gzFile file;
-{
+int ZEXPORT gzrewind(gzFile file) {
gz_statep state;
/* get internal structure */
@@ -368,11 +339,7 @@ int ZEXPORT gzrewind(file)
}
/* -- see zlib.h -- */
-z_off64_t ZEXPORT gzseek64(file, offset, whence)
- gzFile file;
- z_off64_t offset;
- int whence;
-{
+z_off64_t ZEXPORT gzseek64(gzFile file, z_off64_t offset, int whence) {
unsigned n;
z_off64_t ret;
gz_statep state;
@@ -445,11 +412,7 @@ z_off64_t ZEXPORT gzseek64(file, offset, whence)
}
/* -- see zlib.h -- */
-z_off_t ZEXPORT gzseek(file, offset, whence)
- gzFile file;
- z_off_t offset;
- int whence;
-{
+z_off_t ZEXPORT gzseek(gzFile file, z_off_t offset, int whence) {
z_off64_t ret;
ret = gzseek64(file, (z_off64_t)offset, whence);
@@ -457,9 +420,7 @@ z_off_t ZEXPORT gzseek(file, offset, whence)
}
/* -- see zlib.h -- */
-z_off64_t ZEXPORT gztell64(file)
- gzFile file;
-{
+z_off64_t ZEXPORT gztell64(gzFile file) {
gz_statep state;
/* get internal structure and check integrity */
@@ -474,9 +435,7 @@ z_off64_t ZEXPORT gztell64(file)
}
/* -- see zlib.h -- */
-z_off_t ZEXPORT gztell(file)
- gzFile file;
-{
+z_off_t ZEXPORT gztell(gzFile file) {
z_off64_t ret;
ret = gztell64(file);
@@ -484,9 +443,7 @@ z_off_t ZEXPORT gztell(file)
}
/* -- see zlib.h -- */
-z_off64_t ZEXPORT gzoffset64(file)
- gzFile file;
-{
+z_off64_t ZEXPORT gzoffset64(gzFile file) {
z_off64_t offset;
gz_statep state;
@@ -507,9 +464,7 @@ z_off64_t ZEXPORT gzoffset64(file)
}
/* -- see zlib.h -- */
-z_off_t ZEXPORT gzoffset(file)
- gzFile file;
-{
+z_off_t ZEXPORT gzoffset(gzFile file) {
z_off64_t ret;
ret = gzoffset64(file);
@@ -517,9 +472,7 @@ z_off_t ZEXPORT gzoffset(file)
}
/* -- see zlib.h -- */
-int ZEXPORT gzeof(file)
- gzFile file;
-{
+int ZEXPORT gzeof(gzFile file) {
gz_statep state;
/* get internal structure and check integrity */
@@ -534,10 +487,7 @@ int ZEXPORT gzeof(file)
}
/* -- see zlib.h -- */
-const char * ZEXPORT gzerror(file, errnum)
- gzFile file;
- int *errnum;
-{
+const char * ZEXPORT gzerror(gzFile file, int *errnum) {
gz_statep state;
/* get internal structure and check integrity */
@@ -555,9 +505,7 @@ const char * ZEXPORT gzerror(file, errnum)
}
/* -- see zlib.h -- */
-void ZEXPORT gzclearerr(file)
- gzFile file;
-{
+void ZEXPORT gzclearerr(gzFile file) {
gz_statep state;
/* get internal structure and check integrity */
@@ -581,11 +529,7 @@ void ZEXPORT gzclearerr(file)
memory). Simply save the error message as a static string. If there is an
allocation failure constructing the error message, then convert the error to
out of memory. */
-void ZLIB_INTERNAL gz_error(state, err, msg)
- gz_statep state;
- int err;
- const char *msg;
-{
+void ZLIB_INTERNAL gz_error(gz_statep state, int err, const char *msg) {
/* free previously allocated message and clear */
if (state->msg != NULL) {
if (state->err != Z_MEM_ERROR)
@@ -627,8 +571,7 @@ void ZLIB_INTERNAL gz_error(state, err, msg)
available) -- we need to do this to cover cases where 2's complement not
used, since C standard permits 1's complement and sign-bit representations,
otherwise we could just use ((unsigned)-1) >> 1 */
-unsigned ZLIB_INTERNAL gz_intmax()
-{
+unsigned ZLIB_INTERNAL gz_intmax(void) {
unsigned p, q;
p = 1;
diff --git a/deps/zlib/gzread.c b/deps/zlib/gzread.c
index 9449a79289b437..9a9f7847ce4cc6 100644
--- a/deps/zlib/gzread.c
+++ b/deps/zlib/gzread.c
@@ -5,25 +5,12 @@
#include "gzguts.h"
-/* Local functions */
-local int gz_load OF((gz_statep, unsigned char *, unsigned, unsigned *));
-local int gz_avail OF((gz_statep));
-local int gz_look OF((gz_statep));
-local int gz_decomp OF((gz_statep));
-local int gz_fetch OF((gz_statep));
-local int gz_skip OF((gz_statep, z_off64_t));
-local z_size_t gz_read OF((gz_statep, voidp, z_size_t));
-
/* Use read() to load a buffer -- return -1 on error, otherwise 0. Read from
state->fd, and update state->eof, state->err, and state->msg as appropriate.
This function needs to loop on read(), since read() is not guaranteed to
read the number of bytes requested, depending on the type of descriptor. */
-local int gz_load(state, buf, len, have)
- gz_statep state;
- unsigned char *buf;
- unsigned len;
- unsigned *have;
-{
+local int gz_load(gz_statep state, unsigned char *buf, unsigned len,
+ unsigned *have) {
int ret;
unsigned get, max = ((unsigned)-1 >> 2) + 1;
@@ -53,9 +40,7 @@ local int gz_load(state, buf, len, have)
If strm->avail_in != 0, then the current data is moved to the beginning of
the input buffer, and then the remainder of the buffer is loaded with the
available data from the input file. */
-local int gz_avail(state)
- gz_statep state;
-{
+local int gz_avail(gz_statep state) {
unsigned got;
z_streamp strm = &(state->strm);
@@ -88,9 +73,7 @@ local int gz_avail(state)
case, all further file reads will be directly to either the output buffer or
a user buffer. If decompressing, the inflate state will be initialized.
gz_look() will return 0 on success or -1 on failure. */
-local int gz_look(state)
- gz_statep state;
-{
+local int gz_look(gz_statep state) {
z_streamp strm = &(state->strm);
/* allocate read buffers and inflate memory */
@@ -170,9 +153,7 @@ local int gz_look(state)
data. If the gzip stream completes, state->how is reset to LOOK to look for
the next gzip stream or raw data, once state->x.have is depleted. Returns 0
on success, -1 on failure. */
-local int gz_decomp(state)
- gz_statep state;
-{
+local int gz_decomp(gz_statep state) {
int ret = Z_OK;
unsigned had;
z_streamp strm = &(state->strm);
@@ -224,9 +205,7 @@ local int gz_decomp(state)
looked for to determine whether to copy or decompress. Returns -1 on error,
otherwise 0. gz_fetch() will leave state->how as COPY or GZIP unless the
end of the input file has been reached and all data has been processed. */
-local int gz_fetch(state)
- gz_statep state;
-{
+local int gz_fetch(gz_statep state) {
z_streamp strm = &(state->strm);
do {
@@ -254,10 +233,7 @@ local int gz_fetch(state)
}
/* Skip len uncompressed bytes of output. Return -1 on error, 0 on success. */
-local int gz_skip(state, len)
- gz_statep state;
- z_off64_t len;
-{
+local int gz_skip(gz_statep state, z_off64_t len) {
unsigned n;
/* skip over len bytes or reach end-of-file, whichever comes first */
@@ -289,11 +265,7 @@ local int gz_skip(state, len)
input. Return the number of bytes read. If zero is returned, either the
end of file was reached, or there was an error. state->err must be
consulted in that case to determine which. */
-local z_size_t gz_read(state, buf, len)
- gz_statep state;
- voidp buf;
- z_size_t len;
-{
+local z_size_t gz_read(gz_statep state, voidp buf, z_size_t len) {
z_size_t got;
unsigned n;
@@ -370,11 +342,7 @@ local z_size_t gz_read(state, buf, len)
}
/* -- see zlib.h -- */
-int ZEXPORT gzread(file, buf, len)
- gzFile file;
- voidp buf;
- unsigned len;
-{
+int ZEXPORT gzread(gzFile file, voidp buf, unsigned len) {
gz_statep state;
/* get internal structure */
@@ -406,12 +374,7 @@ int ZEXPORT gzread(file, buf, len)
}
/* -- see zlib.h -- */
-z_size_t ZEXPORT gzfread(buf, size, nitems, file)
- voidp buf;
- z_size_t size;
- z_size_t nitems;
- gzFile file;
-{
+z_size_t ZEXPORT gzfread(voidp buf, z_size_t size, z_size_t nitems, gzFile file) {
z_size_t len;
gz_statep state;
@@ -446,9 +409,7 @@ z_size_t ZEXPORT gzfread(buf, size, nitems, file)
# endif
#endif
-int ZEXPORT gzgetc(file)
- gzFile file;
-{
+int ZEXPORT gzgetc(gzFile file) {
unsigned char buf[1];
gz_statep state;
@@ -473,17 +434,12 @@ int ZEXPORT gzgetc(file)
return gz_read(state, buf, 1) < 1 ? -1 : buf[0];
}
-int ZEXPORT gzgetc_(file)
-gzFile file;
-{
+int ZEXPORT gzgetc_(gzFile file) {
return gzgetc(file);
}
/* -- see zlib.h -- */
-int ZEXPORT gzungetc(c, file)
- int c;
- gzFile file;
-{
+int ZEXPORT gzungetc(int c, gzFile file) {
gz_statep state;
/* get internal structure */
@@ -540,11 +496,7 @@ int ZEXPORT gzungetc(c, file)
}
/* -- see zlib.h -- */
-char * ZEXPORT gzgets(file, buf, len)
- gzFile file;
- char *buf;
- int len;
-{
+char * ZEXPORT gzgets(gzFile file, char *buf, int len) {
unsigned left, n;
char *str;
unsigned char *eol;
@@ -604,9 +556,7 @@ char * ZEXPORT gzgets(file, buf, len)
}
/* -- see zlib.h -- */
-int ZEXPORT gzdirect(file)
- gzFile file;
-{
+int ZEXPORT gzdirect(gzFile file) {
gz_statep state;
/* get internal structure */
@@ -624,9 +574,7 @@ int ZEXPORT gzdirect(file)
}
/* -- see zlib.h -- */
-int ZEXPORT gzclose_r(file)
- gzFile file;
-{
+int ZEXPORT gzclose_r(gzFile file) {
int ret, err;
gz_statep state;
diff --git a/deps/zlib/gzwrite.c b/deps/zlib/gzwrite.c
index 3030d74d6176c7..435b4621b5349f 100644
--- a/deps/zlib/gzwrite.c
+++ b/deps/zlib/gzwrite.c
@@ -5,18 +5,10 @@
#include "gzguts.h"
-/* Local functions */
-local int gz_init OF((gz_statep));
-local int gz_comp OF((gz_statep, int));
-local int gz_zero OF((gz_statep, z_off64_t));
-local z_size_t gz_write OF((gz_statep, voidpc, z_size_t));
-
/* Initialize state for writing a gzip file. Mark initialization by setting
state->size to non-zero. Return -1 on a memory allocation failure, or 0 on
success. */
-local int gz_init(state)
- gz_statep state;
-{
+local int gz_init(gz_statep state) {
int ret;
z_streamp strm = &(state->strm);
@@ -70,10 +62,7 @@ local int gz_init(state)
deflate() flush value. If flush is Z_FINISH, then the deflate() state is
reset to start a new gzip stream. If gz->direct is true, then simply write
to the output file without compressing, and ignore flush. */
-local int gz_comp(state, flush)
- gz_statep state;
- int flush;
-{
+local int gz_comp(gz_statep state, int flush) {
int ret, writ;
unsigned have, put, max = ((unsigned)-1 >> 2) + 1;
z_streamp strm = &(state->strm);
@@ -151,10 +140,7 @@ local int gz_comp(state, flush)
/* Compress len zeros to output. Return -1 on a write error or memory
allocation failure by gz_comp(), or 0 on success. */
-local int gz_zero(state, len)
- gz_statep state;
- z_off64_t len;
-{
+local int gz_zero(gz_statep state, z_off64_t len) {
int first;
unsigned n;
z_streamp strm = &(state->strm);
@@ -184,11 +170,7 @@ local int gz_zero(state, len)
/* Write len bytes from buf to file. Return the number of bytes written. If
the returned value is less than len, then there was an error. */
-local z_size_t gz_write(state, buf, len)
- gz_statep state;
- voidpc buf;
- z_size_t len;
-{
+local z_size_t gz_write(gz_statep state, voidpc buf, z_size_t len) {
z_size_t put = len;
/* if len is zero, avoid unnecessary operations */
@@ -252,11 +234,7 @@ local z_size_t gz_write(state, buf, len)
}
/* -- see zlib.h -- */
-int ZEXPORT gzwrite(file, buf, len)
- gzFile file;
- voidpc buf;
- unsigned len;
-{
+int ZEXPORT gzwrite(gzFile file, voidpc buf, unsigned len) {
gz_statep state;
/* get internal structure */
@@ -280,12 +258,8 @@ int ZEXPORT gzwrite(file, buf, len)
}
/* -- see zlib.h -- */
-z_size_t ZEXPORT gzfwrite(buf, size, nitems, file)
- voidpc buf;
- z_size_t size;
- z_size_t nitems;
- gzFile file;
-{
+z_size_t ZEXPORT gzfwrite(voidpc buf, z_size_t size, z_size_t nitems,
+ gzFile file) {
z_size_t len;
gz_statep state;
@@ -310,10 +284,7 @@ z_size_t ZEXPORT gzfwrite(buf, size, nitems, file)
}
/* -- see zlib.h -- */
-int ZEXPORT gzputc(file, c)
- gzFile file;
- int c;
-{
+int ZEXPORT gzputc(gzFile file, int c) {
unsigned have;
unsigned char buf[1];
gz_statep state;
@@ -358,10 +329,7 @@ int ZEXPORT gzputc(file, c)
}
/* -- see zlib.h -- */
-int ZEXPORT gzputs(file, s)
- gzFile file;
- const char *s;
-{
+int ZEXPORT gzputs(gzFile file, const char *s) {
z_size_t len, put;
gz_statep state;
@@ -388,8 +356,7 @@ int ZEXPORT gzputs(file, s)
#include
/* -- see zlib.h -- */
-int ZEXPORTVA gzvprintf(gzFile file, const char *format, va_list va)
-{
+int ZEXPORTVA gzvprintf(gzFile file, const char *format, va_list va) {
int len;
unsigned left;
char *next;
@@ -460,8 +427,7 @@ int ZEXPORTVA gzvprintf(gzFile file, const char *format, va_list va)
return len;
}
-int ZEXPORTVA gzprintf(gzFile file, const char *format, ...)
-{
+int ZEXPORTVA gzprintf(gzFile file, const char *format, ...) {
va_list va;
int ret;
@@ -474,13 +440,10 @@ int ZEXPORTVA gzprintf(gzFile file, const char *format, ...)
#else /* !STDC && !Z_HAVE_STDARG_H */
/* -- see zlib.h -- */
-int ZEXPORTVA gzprintf(file, format, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10,
- a11, a12, a13, a14, a15, a16, a17, a18, a19, a20)
- gzFile file;
- const char *format;
- int a1, a2, a3, a4, a5, a6, a7, a8, a9, a10,
- a11, a12, a13, a14, a15, a16, a17, a18, a19, a20;
-{
+int ZEXPORTVA gzprintf(gzFile file, const char *format, int a1, int a2, int a3,
+ int a4, int a5, int a6, int a7, int a8, int a9, int a10,
+ int a11, int a12, int a13, int a14, int a15, int a16,
+ int a17, int a18, int a19, int a20) {
unsigned len, left;
char *next;
gz_statep state;
@@ -562,10 +525,7 @@ int ZEXPORTVA gzprintf(file, format, a1, a2, a3, a4, a5, a6, a7, a8, a9, a10,
#endif
/* -- see zlib.h -- */
-int ZEXPORT gzflush(file, flush)
- gzFile file;
- int flush;
-{
+int ZEXPORT gzflush(gzFile file, int flush) {
gz_statep state;
/* get internal structure */
@@ -594,11 +554,7 @@ int ZEXPORT gzflush(file, flush)
}
/* -- see zlib.h -- */
-int ZEXPORT gzsetparams(file, level, strategy)
- gzFile file;
- int level;
- int strategy;
-{
+int ZEXPORT gzsetparams(gzFile file, int level, int strategy) {
gz_statep state;
z_streamp strm;
@@ -636,9 +592,7 @@ int ZEXPORT gzsetparams(file, level, strategy)
}
/* -- see zlib.h -- */
-int ZEXPORT gzclose_w(file)
- gzFile file;
-{
+int ZEXPORT gzclose_w(gzFile file) {
int ret = Z_OK;
gz_statep state;
diff --git a/deps/zlib/infback.c b/deps/zlib/infback.c
index cba8fda6fd40ce..9bfc30fe555f57 100644
--- a/deps/zlib/infback.c
+++ b/deps/zlib/infback.c
@@ -15,9 +15,6 @@
#include "inflate.h"
#include "inffast.h"
-/* function prototypes */
-local void fixedtables OF((struct inflate_state FAR *state));
-
/*
strm provides memory allocation functions in zalloc and zfree, or
Z_NULL to use the library memory allocation functions.
@@ -25,13 +22,9 @@ local void fixedtables OF((struct inflate_state FAR *state));
windowBits is in the range 8..15, and window is a user-supplied
window and output buffer that is 2**windowBits bytes.
*/
-int ZEXPORT inflateBackInit_(strm, windowBits, window, version, stream_size)
-z_streamp strm;
-int windowBits;
-unsigned char FAR *window;
-const char *version;
-int stream_size;
-{
+int ZEXPORT inflateBackInit_(z_streamp strm, int windowBits,
+ unsigned char FAR *window, const char *version,
+ int stream_size) {
struct inflate_state FAR *state;
if (version == Z_NULL || version[0] != ZLIB_VERSION[0] ||
@@ -80,9 +73,7 @@ int stream_size;
used for threaded applications, since the rewriting of the tables and virgin
may not be thread-safe.
*/
-local void fixedtables(state)
-struct inflate_state FAR *state;
-{
+local void fixedtables(struct inflate_state FAR *state) {
#ifdef BUILDFIXED
static int virgin = 1;
static code *lenfix, *distfix;
@@ -248,13 +239,8 @@ struct inflate_state FAR *state;
inflateBack() can also return Z_STREAM_ERROR if the input parameters
are not correct, i.e. strm is Z_NULL or the state was not initialized.
*/
-int ZEXPORT inflateBack(strm, in, in_desc, out, out_desc)
-z_streamp strm;
-in_func in;
-void FAR *in_desc;
-out_func out;
-void FAR *out_desc;
-{
+int ZEXPORT inflateBack(z_streamp strm, in_func in, void FAR *in_desc,
+ out_func out, void FAR *out_desc) {
struct inflate_state FAR *state;
z_const unsigned char FAR *next; /* next input */
unsigned char FAR *put; /* next output */
@@ -633,9 +619,7 @@ void FAR *out_desc;
return ret;
}
-int ZEXPORT inflateBackEnd(strm)
-z_streamp strm;
-{
+int ZEXPORT inflateBackEnd(z_streamp strm) {
if (strm == Z_NULL || strm->state == Z_NULL || strm->zfree == (free_func)0)
return Z_STREAM_ERROR;
ZFREE(strm, strm->state);
diff --git a/deps/zlib/inffast.c b/deps/zlib/inffast.c
index d89ad5ccdee0e2..52ed8ca2df612e 100644
--- a/deps/zlib/inffast.c
+++ b/deps/zlib/inffast.c
@@ -51,10 +51,7 @@
requires strm->avail_out >= 258 for each loop to avoid checking for
available output space while decoding.
*/
-void ZLIB_INTERNAL inflate_fast(strm, start)
-z_streamp strm;
-unsigned start; /* inflate()'s starting value for strm->avail_out */
-{
+void ZLIB_INTERNAL inflate_fast(z_streamp strm, unsigned start) {
struct inflate_state FAR *state;
z_const unsigned char FAR *in; /* local strm->next_in */
z_const unsigned char FAR *last; /* have enough input while in < last */
diff --git a/deps/zlib/inffast.h b/deps/zlib/inffast.h
index c7c1c09808e5e9..9ff6ce3b6a96e5 100644
--- a/deps/zlib/inffast.h
+++ b/deps/zlib/inffast.h
@@ -23,4 +23,4 @@
*/
#define INFLATE_FAST_MIN_OUTPUT 258
-void ZLIB_INTERNAL inflate_fast OF((z_streamp strm, unsigned start));
+void ZLIB_INTERNAL inflate_fast(z_streamp strm, unsigned start);
diff --git a/deps/zlib/inflate.c b/deps/zlib/inflate.c
index ada86f1a1ebaa8..5abbd07464ab64 100644
--- a/deps/zlib/inflate.c
+++ b/deps/zlib/inflate.c
@@ -91,20 +91,7 @@
# endif
#endif
-/* function prototypes */
-local int inflateStateCheck OF((z_streamp strm));
-local void fixedtables OF((struct inflate_state FAR *state));
-local int updatewindow OF((z_streamp strm, const unsigned char FAR *end,
- unsigned copy));
-#ifdef BUILDFIXED
- void makefixed OF((void));
-#endif
-local unsigned syncsearch OF((unsigned FAR *have, const unsigned char FAR *buf,
- unsigned len));
-
-local int inflateStateCheck(strm)
-z_streamp strm;
-{
+local int inflateStateCheck(z_streamp strm) {
struct inflate_state FAR *state;
if (strm == Z_NULL ||
strm->zalloc == (alloc_func)0 || strm->zfree == (free_func)0)
@@ -116,9 +103,7 @@ z_streamp strm;
return 0;
}
-int ZEXPORT inflateResetKeep(strm)
-z_streamp strm;
-{
+int ZEXPORT inflateResetKeep(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -142,9 +127,7 @@ z_streamp strm;
return Z_OK;
}
-int ZEXPORT inflateReset(strm)
-z_streamp strm;
-{
+int ZEXPORT inflateReset(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -155,10 +138,7 @@ z_streamp strm;
return inflateResetKeep(strm);
}
-int ZEXPORT inflateReset2(strm, windowBits)
-z_streamp strm;
-int windowBits;
-{
+int ZEXPORT inflateReset2(z_streamp strm, int windowBits) {
int wrap;
struct inflate_state FAR *state;
@@ -195,12 +175,8 @@ int windowBits;
return inflateReset(strm);
}
-int ZEXPORT inflateInit2_(strm, windowBits, version, stream_size)
-z_streamp strm;
-int windowBits;
-const char *version;
-int stream_size;
-{
+int ZEXPORT inflateInit2_(z_streamp strm, int windowBits,
+ const char *version, int stream_size) {
int ret;
struct inflate_state FAR *state;
@@ -240,19 +216,12 @@ int stream_size;
return ret;
}
-int ZEXPORT inflateInit_(strm, version, stream_size)
-z_streamp strm;
-const char *version;
-int stream_size;
-{
+int ZEXPORT inflateInit_(z_streamp strm, const char *version,
+ int stream_size) {
return inflateInit2_(strm, DEF_WBITS, version, stream_size);
}
-int ZEXPORT inflatePrime(strm, bits, value)
-z_streamp strm;
-int bits;
-int value;
-{
+int ZEXPORT inflatePrime(z_streamp strm, int bits, int value) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -281,9 +250,7 @@ int value;
used for threaded applications, since the rewriting of the tables and virgin
may not be thread-safe.
*/
-local void fixedtables(state)
-struct inflate_state FAR *state;
-{
+local void fixedtables(struct inflate_state FAR *state) {
#ifdef BUILDFIXED
static int virgin = 1;
static code *lenfix, *distfix;
@@ -345,7 +312,7 @@ struct inflate_state FAR *state;
a.out > inffixed.h
*/
-void makefixed()
+void makefixed(void)
{
unsigned low, size;
struct inflate_state state;
@@ -399,11 +366,7 @@ void makefixed()
output will fall in the output data, making match copies simpler and faster.
The advantage may be dependent on the size of the processor's data caches.
*/
-local int updatewindow(strm, end, copy)
-z_streamp strm;
-const Bytef *end;
-unsigned copy;
-{
+local int updatewindow(z_streamp strm, const Bytef *end, unsigned copy) {
struct inflate_state FAR *state;
unsigned dist;
@@ -625,10 +588,7 @@ unsigned copy;
will return Z_BUF_ERROR if it has not reached the end of the stream.
*/
-int ZEXPORT inflate(strm, flush)
-z_streamp strm;
-int flush;
-{
+int ZEXPORT inflate(z_streamp strm, int flush) {
struct inflate_state FAR *state;
z_const unsigned char FAR *next; /* next input */
unsigned char FAR *put; /* next output */
@@ -1305,9 +1265,7 @@ int flush;
return ret;
}
-int ZEXPORT inflateEnd(strm)
-z_streamp strm;
-{
+int ZEXPORT inflateEnd(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm))
return Z_STREAM_ERROR;
@@ -1319,11 +1277,8 @@ z_streamp strm;
return Z_OK;
}
-int ZEXPORT inflateGetDictionary(strm, dictionary, dictLength)
-z_streamp strm;
-Bytef *dictionary;
-uInt *dictLength;
-{
+int ZEXPORT inflateGetDictionary(z_streamp strm, Bytef *dictionary,
+ uInt *dictLength) {
struct inflate_state FAR *state;
/* check state */
@@ -1342,11 +1297,8 @@ uInt *dictLength;
return Z_OK;
}
-int ZEXPORT inflateSetDictionary(strm, dictionary, dictLength)
-z_streamp strm;
-const Bytef *dictionary;
-uInt dictLength;
-{
+int ZEXPORT inflateSetDictionary(z_streamp strm, const Bytef *dictionary,
+ uInt dictLength) {
struct inflate_state FAR *state;
unsigned long dictid;
int ret;
@@ -1377,10 +1329,7 @@ uInt dictLength;
return Z_OK;
}
-int ZEXPORT inflateGetHeader(strm, head)
-z_streamp strm;
-gz_headerp head;
-{
+int ZEXPORT inflateGetHeader(z_streamp strm, gz_headerp head) {
struct inflate_state FAR *state;
/* check state */
@@ -1405,11 +1354,8 @@ gz_headerp head;
called again with more data and the *have state. *have is initialized to
zero for the first call.
*/
-local unsigned syncsearch(have, buf, len)
-unsigned FAR *have;
-const unsigned char FAR *buf;
-unsigned len;
-{
+local unsigned syncsearch(unsigned FAR *have, const unsigned char FAR *buf,
+ unsigned len) {
unsigned got;
unsigned next;
@@ -1428,9 +1374,7 @@ unsigned len;
return next;
}
-int ZEXPORT inflateSync(strm)
-z_streamp strm;
-{
+int ZEXPORT inflateSync(z_streamp strm) {
unsigned len; /* number of bytes to look at or looked at */
int flags; /* temporary to save header status */
unsigned long in, out; /* temporary to save total_in and total_out */
@@ -1486,9 +1430,7 @@ z_streamp strm;
block. When decompressing, PPP checks that at the end of input packet,
inflate is waiting for these length bytes.
*/
-int ZEXPORT inflateSyncPoint(strm)
-z_streamp strm;
-{
+int ZEXPORT inflateSyncPoint(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -1496,10 +1438,7 @@ z_streamp strm;
return state->mode == STORED && state->bits == 0;
}
-int ZEXPORT inflateCopy(dest, source)
-z_streamp dest;
-z_streamp source;
-{
+int ZEXPORT inflateCopy(z_streamp dest, z_streamp source) {
struct inflate_state FAR *state;
struct inflate_state FAR *copy;
unsigned char FAR *window;
@@ -1543,10 +1482,7 @@ z_streamp source;
return Z_OK;
}
-int ZEXPORT inflateUndermine(strm, subvert)
-z_streamp strm;
-int subvert;
-{
+int ZEXPORT inflateUndermine(z_streamp strm, int subvert) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -1561,10 +1497,7 @@ int subvert;
#endif
}
-int ZEXPORT inflateValidate(strm, check)
-z_streamp strm;
-int check;
-{
+int ZEXPORT inflateValidate(z_streamp strm, int check) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return Z_STREAM_ERROR;
@@ -1576,9 +1509,7 @@ int check;
return Z_OK;
}
-long ZEXPORT inflateMark(strm)
-z_streamp strm;
-{
+long ZEXPORT inflateMark(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm))
@@ -1589,9 +1520,7 @@ z_streamp strm;
(state->mode == MATCH ? state->was - state->length : 0));
}
-unsigned long ZEXPORT inflateCodesUsed(strm)
-z_streamp strm;
-{
+unsigned long ZEXPORT inflateCodesUsed(z_streamp strm) {
struct inflate_state FAR *state;
if (inflateStateCheck(strm)) return (unsigned long)-1;
state = (struct inflate_state FAR *)strm->state;
diff --git a/deps/zlib/inftrees.c b/deps/zlib/inftrees.c
index 0178ffafe1e0ed..afc4c4212d8022 100644
--- a/deps/zlib/inftrees.c
+++ b/deps/zlib/inftrees.c
@@ -29,14 +29,9 @@ const char inflate_copyright[] =
table index bits. It will differ if the request is greater than the
longest code or if it is less than the shortest code.
*/
-int ZLIB_INTERNAL inflate_table(type, lens, codes, table, bits, work)
-codetype type;
-unsigned short FAR *lens;
-unsigned codes;
-code FAR * FAR *table;
-unsigned FAR *bits;
-unsigned short FAR *work;
-{
+int ZLIB_INTERNAL inflate_table(codetype type, unsigned short FAR *lens,
+ unsigned codes, code FAR * FAR *table,
+ unsigned FAR *bits, unsigned short FAR *work) {
unsigned len; /* a code's length in bits */
unsigned sym; /* index of code symbols */
unsigned min, max; /* minimum and maximum code lengths */
diff --git a/deps/zlib/inftrees.h b/deps/zlib/inftrees.h
index 44b96b30c5aac5..f11f29c0761c15 100644
--- a/deps/zlib/inftrees.h
+++ b/deps/zlib/inftrees.h
@@ -57,6 +57,6 @@ typedef enum {
DISTS
} codetype;
-int ZLIB_INTERNAL inflate_table OF((codetype type, unsigned short FAR *lens,
- unsigned codes, code FAR * FAR *table,
- unsigned FAR *bits, unsigned short FAR *work));
+int ZLIB_INTERNAL inflate_table(codetype type, unsigned short FAR *lens,
+ unsigned codes, code FAR * FAR *table,
+ unsigned FAR *bits, unsigned short FAR *work);
diff --git a/deps/zlib/trees.c b/deps/zlib/trees.c
index 5f305c47221e90..8dbdc40bacce1d 100644
--- a/deps/zlib/trees.c
+++ b/deps/zlib/trees.c
@@ -122,39 +122,116 @@ struct static_tree_desc_s {
int max_length; /* max bit length for the codes */
};
-local const static_tree_desc static_l_desc =
+#ifdef NO_INIT_GLOBAL_POINTERS
+# define TCONST
+#else
+# define TCONST const
+#endif
+
+local TCONST static_tree_desc static_l_desc =
{static_ltree, extra_lbits, LITERALS+1, L_CODES, MAX_BITS};
-local const static_tree_desc static_d_desc =
+local TCONST static_tree_desc static_d_desc =
{static_dtree, extra_dbits, 0, D_CODES, MAX_BITS};
-local const static_tree_desc static_bl_desc =
+local TCONST static_tree_desc static_bl_desc =
{(const ct_data *)0, extra_blbits, 0, BL_CODES, MAX_BL_BITS};
/* ===========================================================================
- * Local (static) routines in this file.
+ * Output a short LSB first on the stream.
+ * IN assertion: there is enough room in pendingBuf.
+ */
+#define put_short(s, w) { \
+ put_byte(s, (uch)((w) & 0xff)); \
+ put_byte(s, (uch)((ush)(w) >> 8)); \
+}
+
+/* ===========================================================================
+ * Reverse the first len bits of a code, using straightforward code (a faster
+ * method would use a table)
+ * IN assertion: 1 <= len <= 15
*/
+local unsigned bi_reverse(unsigned code, int len) {
+ register unsigned res = 0;
+ do {
+ res |= code & 1;
+ code >>= 1, res <<= 1;
+ } while (--len > 0);
+ return res >> 1;
+}
-local void tr_static_init OF((void));
-local void init_block OF((deflate_state *s));
-local void pqdownheap OF((deflate_state *s, ct_data *tree, int k));
-local void gen_bitlen OF((deflate_state *s, tree_desc *desc));
-local void gen_codes OF((ct_data *tree, int max_code, ushf *bl_count));
-local void build_tree OF((deflate_state *s, tree_desc *desc));
-local void scan_tree OF((deflate_state *s, ct_data *tree, int max_code));
-local void send_tree OF((deflate_state *s, ct_data *tree, int max_code));
-local int build_bl_tree OF((deflate_state *s));
-local void send_all_trees OF((deflate_state *s, int lcodes, int dcodes,
- int blcodes));
-local void compress_block OF((deflate_state *s, const ct_data *ltree,
- const ct_data *dtree));
-local int detect_data_type OF((deflate_state *s));
-local unsigned bi_reverse OF((unsigned code, int len));
-local void bi_windup OF((deflate_state *s));
-local void bi_flush OF((deflate_state *s));
+/* ===========================================================================
+ * Flush the bit buffer, keeping at most 7 bits in it.
+ */
+local void bi_flush(deflate_state *s) {
+ if (s->bi_valid == 16) {
+ put_short(s, s->bi_buf);
+ s->bi_buf = 0;
+ s->bi_valid = 0;
+ } else if (s->bi_valid >= 8) {
+ put_byte(s, (Byte)s->bi_buf);
+ s->bi_buf >>= 8;
+ s->bi_valid -= 8;
+ }
+}
+
+/* ===========================================================================
+ * Flush the bit buffer and align the output on a byte boundary
+ */
+local void bi_windup(deflate_state *s) {
+ if (s->bi_valid > 8) {
+ put_short(s, s->bi_buf);
+ } else if (s->bi_valid > 0) {
+ put_byte(s, (Byte)s->bi_buf);
+ }
+ s->bi_buf = 0;
+ s->bi_valid = 0;
+#ifdef ZLIB_DEBUG
+ s->bits_sent = (s->bits_sent + 7) & ~7;
+#endif
+}
+
+/* ===========================================================================
+ * Generate the codes for a given tree and bit counts (which need not be
+ * optimal).
+ * IN assertion: the array bl_count contains the bit length statistics for
+ * the given tree and the field len is set for all tree elements.
+ * OUT assertion: the field code is set for all tree elements of non
+ * zero code length.
+ */
+local void gen_codes(ct_data *tree, int max_code, ushf *bl_count) {
+ ush next_code[MAX_BITS+1]; /* next code value for each bit length */
+ unsigned code = 0; /* running code value */
+ int bits; /* bit index */
+ int n; /* code index */
+
+ /* The distribution counts are first used to generate the code values
+ * without bit reversal.
+ */
+ for (bits = 1; bits <= MAX_BITS; bits++) {
+ code = (code + bl_count[bits - 1]) << 1;
+ next_code[bits] = (ush)code;
+ }
+ /* Check that the bit counts in bl_count are consistent. The last code
+ * must be all ones.
+ */
+ Assert (code + bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
+ "inconsistent bit counts");
+ Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
+
+ for (n = 0; n <= max_code; n++) {
+ int len = tree[n].Len;
+ if (len == 0) continue;
+ /* Now reverse the bits */
+ tree[n].Code = (ush)bi_reverse(next_code[len]++, len);
+
+ Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
+ n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len] - 1));
+ }
+}
#ifdef GEN_TREES_H
-local void gen_trees_header OF((void));
+local void gen_trees_header(void);
#endif
#ifndef ZLIB_DEBUG
@@ -167,27 +244,12 @@ local void gen_trees_header OF((void));
send_bits(s, tree[c].Code, tree[c].Len); }
#endif
-/* ===========================================================================
- * Output a short LSB first on the stream.
- * IN assertion: there is enough room in pendingBuf.
- */
-#define put_short(s, w) { \
- put_byte(s, (uch)((w) & 0xff)); \
- put_byte(s, (uch)((ush)(w) >> 8)); \
-}
-
/* ===========================================================================
* Send a value on a given number of bits.
* IN assertion: length <= 16 and value fits in length bits.
*/
#ifdef ZLIB_DEBUG
-local void send_bits OF((deflate_state *s, int value, int length));
-
-local void send_bits(s, value, length)
- deflate_state *s;
- int value; /* value to send */
- int length; /* number of bits */
-{
+local void send_bits(deflate_state *s, int value, int length) {
Tracevv((stderr," l %2d v %4x ", length, value));
Assert(length > 0 && length <= 15, "invalid length");
s->bits_sent += (ulg)length;
@@ -229,8 +291,7 @@ local void send_bits(s, value, length)
/* ===========================================================================
* Initialize the various 'constant' tables.
*/
-local void tr_static_init()
-{
+local void tr_static_init(void) {
#if defined(GEN_TREES_H) || !defined(STDC)
static int static_init_done = 0;
int n; /* iterates over tree elements */
@@ -323,8 +384,7 @@ local void tr_static_init()
((i) == (last)? "\n};\n\n" : \
((i) % (width) == (width) - 1 ? ",\n" : ", "))
-void gen_trees_header()
-{
+void gen_trees_header(void) {
FILE *header = fopen("trees.h", "w");
int i;
@@ -373,12 +433,26 @@ void gen_trees_header()
}
#endif /* GEN_TREES_H */
+/* ===========================================================================
+ * Initialize a new block.
+ */
+local void init_block(deflate_state *s) {
+ int n; /* iterates over tree elements */
+
+ /* Initialize the trees. */
+ for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0;
+ for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0;
+ for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;
+
+ s->dyn_ltree[END_BLOCK].Freq = 1;
+ s->opt_len = s->static_len = 0L;
+ s->sym_next = s->matches = 0;
+}
+
/* ===========================================================================
* Initialize the tree data structures for a new zlib stream.
*/
-void ZLIB_INTERNAL _tr_init(s)
- deflate_state *s;
-{
+void ZLIB_INTERNAL _tr_init(deflate_state *s) {
tr_static_init();
s->l_desc.dyn_tree = s->dyn_ltree;
@@ -401,24 +475,6 @@ void ZLIB_INTERNAL _tr_init(s)
init_block(s);
}
-/* ===========================================================================
- * Initialize a new block.
- */
-local void init_block(s)
- deflate_state *s;
-{
- int n; /* iterates over tree elements */
-
- /* Initialize the trees. */
- for (n = 0; n < L_CODES; n++) s->dyn_ltree[n].Freq = 0;
- for (n = 0; n < D_CODES; n++) s->dyn_dtree[n].Freq = 0;
- for (n = 0; n < BL_CODES; n++) s->bl_tree[n].Freq = 0;
-
- s->dyn_ltree[END_BLOCK].Freq = 1;
- s->opt_len = s->static_len = 0L;
- s->sym_next = s->matches = 0;
-}
-
#define SMALLEST 1
/* Index within the heap array of least frequent node in the Huffman tree */
@@ -448,11 +504,7 @@ local void init_block(s)
* when the heap property is re-established (each father smaller than its
* two sons).
*/
-local void pqdownheap(s, tree, k)
- deflate_state *s;
- ct_data *tree; /* the tree to restore */
- int k; /* node to move down */
-{
+local void pqdownheap(deflate_state *s, ct_data *tree, int k) {
int v = s->heap[k];
int j = k << 1; /* left son of k */
while (j <= s->heap_len) {
@@ -483,10 +535,7 @@ local void pqdownheap(s, tree, k)
* The length opt_len is updated; static_len is also updated if stree is
* not null.
*/
-local void gen_bitlen(s, desc)
- deflate_state *s;
- tree_desc *desc; /* the tree descriptor */
-{
+local void gen_bitlen(deflate_state *s, tree_desc *desc) {
ct_data *tree = desc->dyn_tree;
int max_code = desc->max_code;
const ct_data *stree = desc->stat_desc->static_tree;
@@ -561,48 +610,9 @@ local void gen_bitlen(s, desc)
}
}
-/* ===========================================================================
- * Generate the codes for a given tree and bit counts (which need not be
- * optimal).
- * IN assertion: the array bl_count contains the bit length statistics for
- * the given tree and the field len is set for all tree elements.
- * OUT assertion: the field code is set for all tree elements of non
- * zero code length.
- */
-local void gen_codes(tree, max_code, bl_count)
- ct_data *tree; /* the tree to decorate */
- int max_code; /* largest code with non zero frequency */
- ushf *bl_count; /* number of codes at each bit length */
-{
- ush next_code[MAX_BITS+1]; /* next code value for each bit length */
- unsigned code = 0; /* running code value */
- int bits; /* bit index */
- int n; /* code index */
-
- /* The distribution counts are first used to generate the code values
- * without bit reversal.
- */
- for (bits = 1; bits <= MAX_BITS; bits++) {
- code = (code + bl_count[bits - 1]) << 1;
- next_code[bits] = (ush)code;
- }
- /* Check that the bit counts in bl_count are consistent. The last code
- * must be all ones.
- */
- Assert (code + bl_count[MAX_BITS] - 1 == (1 << MAX_BITS) - 1,
- "inconsistent bit counts");
- Tracev((stderr,"\ngen_codes: max_code %d ", max_code));
-
- for (n = 0; n <= max_code; n++) {
- int len = tree[n].Len;
- if (len == 0) continue;
- /* Now reverse the bits */
- tree[n].Code = (ush)bi_reverse(next_code[len]++, len);
-
- Tracecv(tree != static_ltree, (stderr,"\nn %3d %c l %2d c %4x (%x) ",
- n, (isgraph(n) ? n : ' '), len, tree[n].Code, next_code[len] - 1));
- }
-}
+#ifdef DUMP_BL_TREE
+# include
+#endif
/* ===========================================================================
* Construct one Huffman tree and assigns the code bit strings and lengths.
@@ -612,10 +622,7 @@ local void gen_codes(tree, max_code, bl_count)
* and corresponding code. The length opt_len is updated; static_len is
* also updated if stree is not null. The field max_code is set.
*/
-local void build_tree(s, desc)
- deflate_state *s;
- tree_desc *desc; /* the tree descriptor */
-{
+local void build_tree(deflate_state *s, tree_desc *desc) {
ct_data *tree = desc->dyn_tree;
const ct_data *stree = desc->stat_desc->static_tree;
int elems = desc->stat_desc->elems;
@@ -700,11 +707,7 @@ local void build_tree(s, desc)
* Scan a literal or distance tree to determine the frequencies of the codes
* in the bit length tree.
*/
-local void scan_tree(s, tree, max_code)
- deflate_state *s;
- ct_data *tree; /* the tree to be scanned */
- int max_code; /* and its largest code of non zero frequency */
-{
+local void scan_tree(deflate_state *s, ct_data *tree, int max_code) {
int n; /* iterates over all tree elements */
int prevlen = -1; /* last emitted length */
int curlen; /* length of current code */
@@ -745,11 +748,7 @@ local void scan_tree(s, tree, max_code)
* Send a literal or distance tree in compressed form, using the codes in
* bl_tree.
*/
-local void send_tree(s, tree, max_code)
- deflate_state *s;
- ct_data *tree; /* the tree to be scanned */
- int max_code; /* and its largest code of non zero frequency */
-{
+local void send_tree(deflate_state *s, ct_data *tree, int max_code) {
int n; /* iterates over all tree elements */
int prevlen = -1; /* last emitted length */
int curlen; /* length of current code */
@@ -796,9 +795,7 @@ local void send_tree(s, tree, max_code)
* Construct the Huffman tree for the bit lengths and return the index in
* bl_order of the last bit length code to send.
*/
-local int build_bl_tree(s)
- deflate_state *s;
-{
+local int build_bl_tree(deflate_state *s) {
int max_blindex; /* index of last bit length code of non zero freq */
/* Determine the bit length frequencies for literal and distance trees */
@@ -831,10 +828,8 @@ local int build_bl_tree(s)
* lengths of the bit length codes, the literal tree and the distance tree.
* IN assertion: lcodes >= 257, dcodes >= 1, blcodes >= 4.
*/
-local void send_all_trees(s, lcodes, dcodes, blcodes)
- deflate_state *s;
- int lcodes, dcodes, blcodes; /* number of codes for each tree */
-{
+local void send_all_trees(deflate_state *s, int lcodes, int dcodes,
+ int blcodes) {
int rank; /* index in bl_order */
Assert (lcodes >= 257 && dcodes >= 1 && blcodes >= 4, "not enough codes");
@@ -860,12 +855,8 @@ local void send_all_trees(s, lcodes, dcodes, blcodes)
/* ===========================================================================
* Send a stored block
*/
-void ZLIB_INTERNAL _tr_stored_block(s, buf, stored_len, last)
- deflate_state *s;
- charf *buf; /* input block */
- ulg stored_len; /* length of input block */
- int last; /* one if this is the last block for a file */
-{
+void ZLIB_INTERNAL _tr_stored_block(deflate_state *s, charf *buf,
+ ulg stored_len, int last) {
send_bits(s, (STORED_BLOCK<<1) + last, 3); /* send block type */
bi_windup(s); /* align on byte boundary */
put_short(s, (ush)stored_len);
@@ -884,9 +875,7 @@ void ZLIB_INTERNAL _tr_stored_block(s, buf, stored_len, last)
/* ===========================================================================
* Flush the bits in the bit buffer to pending output (leaves at most 7 bits)
*/
-void ZLIB_INTERNAL _tr_flush_bits(s)
- deflate_state *s;
-{
+void ZLIB_INTERNAL _tr_flush_bits(deflate_state *s) {
bi_flush(s);
}
@@ -894,9 +883,7 @@ void ZLIB_INTERNAL _tr_flush_bits(s)
* Send one empty static block to give enough lookahead for inflate.
* This takes 10 bits, of which 7 may remain in the bit buffer.
*/
-void ZLIB_INTERNAL _tr_align(s)
- deflate_state *s;
-{
+void ZLIB_INTERNAL _tr_align(deflate_state *s) {
send_bits(s, STATIC_TREES<<1, 3);
send_code(s, END_BLOCK, static_ltree);
#ifdef ZLIB_DEBUG
@@ -905,16 +892,99 @@ void ZLIB_INTERNAL _tr_align(s)
bi_flush(s);
}
+/* ===========================================================================
+ * Send the block data compressed using the given Huffman trees
+ */
+local void compress_block(deflate_state *s, const ct_data *ltree,
+ const ct_data *dtree) {
+ unsigned dist; /* distance of matched string */
+ int lc; /* match length or unmatched char (if dist == 0) */
+ unsigned sx = 0; /* running index in sym_buf */
+ unsigned code; /* the code to send */
+ int extra; /* number of extra bits to send */
+
+ if (s->sym_next != 0) do {
+ dist = s->sym_buf[sx++] & 0xff;
+ dist += (unsigned)(s->sym_buf[sx++] & 0xff) << 8;
+ lc = s->sym_buf[sx++];
+ if (dist == 0) {
+ send_code(s, lc, ltree); /* send a literal byte */
+ Tracecv(isgraph(lc), (stderr," '%c' ", lc));
+ } else {
+ /* Here, lc is the match length - MIN_MATCH */
+ code = _length_code[lc];
+ send_code(s, code + LITERALS + 1, ltree); /* send length code */
+ extra = extra_lbits[code];
+ if (extra != 0) {
+ lc -= base_length[code];
+ send_bits(s, lc, extra); /* send the extra length bits */
+ }
+ dist--; /* dist is now the match distance - 1 */
+ code = d_code(dist);
+ Assert (code < D_CODES, "bad d_code");
+
+ send_code(s, code, dtree); /* send the distance code */
+ extra = extra_dbits[code];
+ if (extra != 0) {
+ dist -= (unsigned)base_dist[code];
+ send_bits(s, dist, extra); /* send the extra distance bits */
+ }
+ } /* literal or match pair ? */
+
+ /* Check that the overlay between pending_buf and sym_buf is ok: */
+ Assert(s->pending < s->lit_bufsize + sx, "pendingBuf overflow");
+
+ } while (sx < s->sym_next);
+
+ send_code(s, END_BLOCK, ltree);
+}
+
+/* ===========================================================================
+ * Check if the data type is TEXT or BINARY, using the following algorithm:
+ * - TEXT if the two conditions below are satisfied:
+ * a) There are no non-portable control characters belonging to the
+ * "block list" (0..6, 14..25, 28..31).
+ * b) There is at least one printable character belonging to the
+ * "allow list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255).
+ * - BINARY otherwise.
+ * - The following partially-portable control characters form a
+ * "gray list" that is ignored in this detection algorithm:
+ * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}).
+ * IN assertion: the fields Freq of dyn_ltree are set.
+ */
+local int detect_data_type(deflate_state *s) {
+ /* block_mask is the bit mask of block-listed bytes
+ * set bits 0..6, 14..25, and 28..31
+ * 0xf3ffc07f = binary 11110011111111111100000001111111
+ */
+ unsigned long block_mask = 0xf3ffc07fUL;
+ int n;
+
+ /* Check for non-textual ("block-listed") bytes. */
+ for (n = 0; n <= 31; n++, block_mask >>= 1)
+ if ((block_mask & 1) && (s->dyn_ltree[n].Freq != 0))
+ return Z_BINARY;
+
+ /* Check for textual ("allow-listed") bytes. */
+ if (s->dyn_ltree[9].Freq != 0 || s->dyn_ltree[10].Freq != 0
+ || s->dyn_ltree[13].Freq != 0)
+ return Z_TEXT;
+ for (n = 32; n < LITERALS; n++)
+ if (s->dyn_ltree[n].Freq != 0)
+ return Z_TEXT;
+
+ /* There are no "block-listed" or "allow-listed" bytes:
+ * this stream either is empty or has tolerated ("gray-listed") bytes only.
+ */
+ return Z_BINARY;
+}
+
/* ===========================================================================
* Determine the best encoding for the current block: dynamic trees, static
* trees or store, and write out the encoded block.
*/
-void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
- deflate_state *s;
- charf *buf; /* input block, or NULL if too old */
- ulg stored_len; /* length of input block */
- int last; /* one if this is the last block for a file */
-{
+void ZLIB_INTERNAL _tr_flush_block(deflate_state *s, charf *buf,
+ ulg stored_len, int last) {
ulg opt_lenb, static_lenb; /* opt_len and static_len in bytes */
int max_blindex = 0; /* index of last bit length code of non zero freq */
@@ -1011,11 +1081,7 @@ void ZLIB_INTERNAL _tr_flush_block(s, buf, stored_len, last)
* Save the match info and tally the frequency counts. Return true if
* the current block must be flushed.
*/
-int ZLIB_INTERNAL _tr_tally(s, dist, lc)
- deflate_state *s;
- unsigned dist; /* distance of matched string */
- unsigned lc; /* match length - MIN_MATCH or unmatched char (dist==0) */
-{
+int ZLIB_INTERNAL _tr_tally(deflate_state *s, unsigned dist, unsigned lc) {
s->sym_buf[s->sym_next++] = (uch)dist;
s->sym_buf[s->sym_next++] = (uch)(dist >> 8);
s->sym_buf[s->sym_next++] = (uch)lc;
@@ -1035,147 +1101,3 @@ int ZLIB_INTERNAL _tr_tally(s, dist, lc)
}
return (s->sym_next == s->sym_end);
}
-
-/* ===========================================================================
- * Send the block data compressed using the given Huffman trees
- */
-local void compress_block(s, ltree, dtree)
- deflate_state *s;
- const ct_data *ltree; /* literal tree */
- const ct_data *dtree; /* distance tree */
-{
- unsigned dist; /* distance of matched string */
- int lc; /* match length or unmatched char (if dist == 0) */
- unsigned sx = 0; /* running index in sym_buf */
- unsigned code; /* the code to send */
- int extra; /* number of extra bits to send */
-
- if (s->sym_next != 0) do {
- dist = s->sym_buf[sx++] & 0xff;
- dist += (unsigned)(s->sym_buf[sx++] & 0xff) << 8;
- lc = s->sym_buf[sx++];
- if (dist == 0) {
- send_code(s, lc, ltree); /* send a literal byte */
- Tracecv(isgraph(lc), (stderr," '%c' ", lc));
- } else {
- /* Here, lc is the match length - MIN_MATCH */
- code = _length_code[lc];
- send_code(s, code + LITERALS + 1, ltree); /* send length code */
- extra = extra_lbits[code];
- if (extra != 0) {
- lc -= base_length[code];
- send_bits(s, lc, extra); /* send the extra length bits */
- }
- dist--; /* dist is now the match distance - 1 */
- code = d_code(dist);
- Assert (code < D_CODES, "bad d_code");
-
- send_code(s, code, dtree); /* send the distance code */
- extra = extra_dbits[code];
- if (extra != 0) {
- dist -= (unsigned)base_dist[code];
- send_bits(s, dist, extra); /* send the extra distance bits */
- }
- } /* literal or match pair ? */
-
- /* Check that the overlay between pending_buf and sym_buf is ok: */
- Assert(s->pending < s->lit_bufsize + sx, "pendingBuf overflow");
-
- } while (sx < s->sym_next);
-
- send_code(s, END_BLOCK, ltree);
-}
-
-/* ===========================================================================
- * Check if the data type is TEXT or BINARY, using the following algorithm:
- * - TEXT if the two conditions below are satisfied:
- * a) There are no non-portable control characters belonging to the
- * "block list" (0..6, 14..25, 28..31).
- * b) There is at least one printable character belonging to the
- * "allow list" (9 {TAB}, 10 {LF}, 13 {CR}, 32..255).
- * - BINARY otherwise.
- * - The following partially-portable control characters form a
- * "gray list" that is ignored in this detection algorithm:
- * (7 {BEL}, 8 {BS}, 11 {VT}, 12 {FF}, 26 {SUB}, 27 {ESC}).
- * IN assertion: the fields Freq of dyn_ltree are set.
- */
-local int detect_data_type(s)
- deflate_state *s;
-{
- /* block_mask is the bit mask of block-listed bytes
- * set bits 0..6, 14..25, and 28..31
- * 0xf3ffc07f = binary 11110011111111111100000001111111
- */
- unsigned long block_mask = 0xf3ffc07fUL;
- int n;
-
- /* Check for non-textual ("block-listed") bytes. */
- for (n = 0; n <= 31; n++, block_mask >>= 1)
- if ((block_mask & 1) && (s->dyn_ltree[n].Freq != 0))
- return Z_BINARY;
-
- /* Check for textual ("allow-listed") bytes. */
- if (s->dyn_ltree[9].Freq != 0 || s->dyn_ltree[10].Freq != 0
- || s->dyn_ltree[13].Freq != 0)
- return Z_TEXT;
- for (n = 32; n < LITERALS; n++)
- if (s->dyn_ltree[n].Freq != 0)
- return Z_TEXT;
-
- /* There are no "block-listed" or "allow-listed" bytes:
- * this stream either is empty or has tolerated ("gray-listed") bytes only.
- */
- return Z_BINARY;
-}
-
-/* ===========================================================================
- * Reverse the first len bits of a code, using straightforward code (a faster
- * method would use a table)
- * IN assertion: 1 <= len <= 15
- */
-local unsigned bi_reverse(code, len)
- unsigned code; /* the value to invert */
- int len; /* its bit length */
-{
- register unsigned res = 0;
- do {
- res |= code & 1;
- code >>= 1, res <<= 1;
- } while (--len > 0);
- return res >> 1;
-}
-
-/* ===========================================================================
- * Flush the bit buffer, keeping at most 7 bits in it.
- */
-local void bi_flush(s)
- deflate_state *s;
-{
- if (s->bi_valid == 16) {
- put_short(s, s->bi_buf);
- s->bi_buf = 0;
- s->bi_valid = 0;
- } else if (s->bi_valid >= 8) {
- put_byte(s, (Byte)s->bi_buf);
- s->bi_buf >>= 8;
- s->bi_valid -= 8;
- }
-}
-
-/* ===========================================================================
- * Flush the bit buffer and align the output on a byte boundary
- */
-local void bi_windup(s)
- deflate_state *s;
-{
- if (s->bi_valid > 8) {
- put_short(s, s->bi_buf);
- } else if (s->bi_valid > 0) {
- put_byte(s, (Byte)s->bi_buf);
- }
- s->bi_buf = 0;
- s->bi_valid = 0;
-#ifdef ZLIB_DEBUG
- s->bits_sent = (s->bits_sent + 7) & ~7;
-#endif
-}
diff --git a/deps/zlib/uncompr.c b/deps/zlib/uncompr.c
index f9532f46c1a69f..5e256663b4511c 100644
--- a/deps/zlib/uncompr.c
+++ b/deps/zlib/uncompr.c
@@ -24,12 +24,8 @@
Z_DATA_ERROR if the input data was corrupted, including if the input data is
an incomplete zlib stream.
*/
-int ZEXPORT uncompress2(dest, destLen, source, sourceLen)
- Bytef *dest;
- uLongf *destLen;
- const Bytef *source;
- uLong *sourceLen;
-{
+int ZEXPORT uncompress2(Bytef *dest, uLongf *destLen, const Bytef *source,
+ uLong *sourceLen) {
z_stream stream;
int err;
const uInt max = (uInt)-1;
@@ -83,11 +79,7 @@ int ZEXPORT uncompress2(dest, destLen, source, sourceLen)
err;
}
-int ZEXPORT uncompress(dest, destLen, source, sourceLen)
- Bytef *dest;
- uLongf *destLen;
- const Bytef *source;
- uLong sourceLen;
-{
+int ZEXPORT uncompress(Bytef *dest, uLongf *destLen, const Bytef *source,
+ uLong sourceLen) {
return uncompress2(dest, destLen, source, &sourceLen);
}
diff --git a/deps/zlib/zconf.h b/deps/zlib/zconf.h
index 2f2956b13e6afd..4f2ebbb755e433 100644
--- a/deps/zlib/zconf.h
+++ b/deps/zlib/zconf.h
@@ -539,7 +539,7 @@ typedef uLong FAR uLongf;
#if !defined(_WIN32) && defined(Z_LARGE64)
# define z_off64_t off64_t
#else
-# if defined(_WIN32) && !defined(__GNUC__) && !defined(Z_SOLO)
+# if defined(_WIN32) && !defined(__GNUC__)
# define z_off64_t __int64
# else
# define z_off64_t z_off_t
diff --git a/deps/zlib/zconf.h.cmakein b/deps/zlib/zconf.h.cmakein
index 9cc20bfb6ce74b..310c43928a299f 100644
--- a/deps/zlib/zconf.h.cmakein
+++ b/deps/zlib/zconf.h.cmakein
@@ -526,7 +526,7 @@ typedef uLong FAR uLongf;
#if !defined(_WIN32) && defined(Z_LARGE64)
# define z_off64_t off64_t
#else
-# if defined(_WIN32) && !defined(__GNUC__) && !defined(Z_SOLO)
+# if defined(_WIN32) && !defined(__GNUC__)
# define z_off64_t __int64
# else
# define z_off64_t z_off_t
diff --git a/deps/zlib/zlib.h b/deps/zlib/zlib.h
index 5b503db75664dd..014331fc8dc33f 100644
--- a/deps/zlib/zlib.h
+++ b/deps/zlib/zlib.h
@@ -78,8 +78,8 @@ extern "C" {
even in the case of corrupted input.
*/
-typedef voidpf (*alloc_func) OF((voidpf opaque, uInt items, uInt size));
-typedef void (*free_func) OF((voidpf opaque, voidpf address));
+typedef voidpf (*alloc_func)(voidpf opaque, uInt items, uInt size);
+typedef void (*free_func)(voidpf opaque, voidpf address);
struct internal_state;
@@ -217,7 +217,7 @@ typedef gz_header FAR *gz_headerp;
/* basic functions */
-ZEXTERN const char * ZEXPORT zlibVersion OF((void));
+ZEXTERN const char * ZEXPORT zlibVersion(void);
/* The application can compare zlibVersion and ZLIB_VERSION for consistency.
If the first character differs, the library code actually used is not
compatible with the zlib.h header file used by the application. This check
@@ -225,7 +225,7 @@ ZEXTERN const char * ZEXPORT zlibVersion OF((void));
*/
/*
-ZEXTERN int ZEXPORT deflateInit OF((z_streamp strm, int level));
+ZEXTERN int ZEXPORT deflateInit(z_streamp strm, int level);
Initializes the internal stream state for compression. The fields
zalloc, zfree and opaque must be initialized before by the caller. If
@@ -247,7 +247,7 @@ ZEXTERN int ZEXPORT deflateInit OF((z_streamp strm, int level));
*/
-ZEXTERN int ZEXPORT deflate OF((z_streamp strm, int flush));
+ZEXTERN int ZEXPORT deflate(z_streamp strm, int flush);
/*
deflate compresses as much data as possible, and stops when the input
buffer becomes empty or the output buffer becomes full. It may introduce
@@ -359,7 +359,7 @@ ZEXTERN int ZEXPORT deflate OF((z_streamp strm, int flush));
continue compressing.
*/
-ZEXTERN int ZEXPORT deflateEnd OF((z_streamp strm));
+ZEXTERN int ZEXPORT deflateEnd(z_streamp strm);
/*
All dynamically allocated data structures for this stream are freed.
This function discards any unprocessed input and does not flush any pending
@@ -374,7 +374,7 @@ ZEXTERN int ZEXPORT deflateEnd OF((z_streamp strm));
/*
-ZEXTERN int ZEXPORT inflateInit OF((z_streamp strm));
+ZEXTERN int ZEXPORT inflateInit(z_streamp strm);
Initializes the internal stream state for decompression. The fields
next_in, avail_in, zalloc, zfree and opaque must be initialized before by
@@ -396,7 +396,7 @@ ZEXTERN int ZEXPORT inflateInit OF((z_streamp strm));
*/
-ZEXTERN int ZEXPORT inflate OF((z_streamp strm, int flush));
+ZEXTERN int ZEXPORT inflate(z_streamp strm, int flush);
/*
inflate decompresses as much data as possible, and stops when the input
buffer becomes empty or the output buffer becomes full. It may introduce
@@ -516,7 +516,7 @@ ZEXTERN int ZEXPORT inflate OF((z_streamp strm, int flush));
*/
-ZEXTERN int ZEXPORT inflateEnd OF((z_streamp strm));
+ZEXTERN int ZEXPORT inflateEnd(z_streamp strm);
/*
All dynamically allocated data structures for this stream are freed.
This function discards any unprocessed input and does not flush any pending
@@ -534,12 +534,12 @@ ZEXTERN int ZEXPORT inflateEnd OF((z_streamp strm));
*/
/*
-ZEXTERN int ZEXPORT deflateInit2 OF((z_streamp strm,
- int level,
- int method,
- int windowBits,
- int memLevel,
- int strategy));
+ZEXTERN int ZEXPORT deflateInit2(z_streamp strm,
+ int level,
+ int method,
+ int windowBits,
+ int memLevel,
+ int strategy);
This is another version of deflateInit with more compression options. The
fields zalloc, zfree and opaque must be initialized before by the caller.
@@ -606,9 +606,9 @@ ZEXTERN int ZEXPORT deflateInit2 OF((z_streamp strm,
compression: this will be done by deflate().
*/
-ZEXTERN int ZEXPORT deflateSetDictionary OF((z_streamp strm,
- const Bytef *dictionary,
- uInt dictLength));
+ZEXTERN int ZEXPORT deflateSetDictionary(z_streamp strm,
+ const Bytef *dictionary,
+ uInt dictLength);
/*
Initializes the compression dictionary from the given byte sequence
without producing any compressed output. When using the zlib format, this
@@ -650,9 +650,9 @@ ZEXTERN int ZEXPORT deflateSetDictionary OF((z_streamp strm,
not perform any compression: this will be done by deflate().
*/
-ZEXTERN int ZEXPORT deflateGetDictionary OF((z_streamp strm,
- Bytef *dictionary,
- uInt *dictLength));
+ZEXTERN int ZEXPORT deflateGetDictionary(z_streamp strm,
+ Bytef *dictionary,
+ uInt *dictLength);
/*
Returns the sliding dictionary being maintained by deflate. dictLength is
set to the number of bytes in the dictionary, and that many bytes are copied
@@ -672,8 +672,8 @@ ZEXTERN int ZEXPORT deflateGetDictionary OF((z_streamp strm,
stream state is inconsistent.
*/
-ZEXTERN int ZEXPORT deflateCopy OF((z_streamp dest,
- z_streamp source));
+ZEXTERN int ZEXPORT deflateCopy(z_streamp dest,
+ z_streamp source);
/*
Sets the destination stream as a complete copy of the source stream.
@@ -690,7 +690,7 @@ ZEXTERN int ZEXPORT deflateCopy OF((z_streamp dest,
destination.
*/
-ZEXTERN int ZEXPORT deflateReset OF((z_streamp strm));
+ZEXTERN int ZEXPORT deflateReset(z_streamp strm);
/*
This function is equivalent to deflateEnd followed by deflateInit, but
does not free and reallocate the internal compression state. The stream
@@ -701,9 +701,9 @@ ZEXTERN int ZEXPORT deflateReset OF((z_streamp strm));
stream state was inconsistent (such as zalloc or state being Z_NULL).
*/
-ZEXTERN int ZEXPORT deflateParams OF((z_streamp strm,
- int level,
- int strategy));
+ZEXTERN int ZEXPORT deflateParams(z_streamp strm,
+ int level,
+ int strategy);
/*
Dynamically update the compression level and compression strategy. The
interpretation of level and strategy is as in deflateInit2(). This can be
@@ -728,7 +728,7 @@ ZEXTERN int ZEXPORT deflateParams OF((z_streamp strm,
Then no more input data should be provided before the deflateParams() call.
If this is done, the old level and strategy will be applied to the data
compressed before deflateParams(), and the new level and strategy will be
- applied to the the data compressed after deflateParams().
+ applied to the data compressed after deflateParams().
deflateParams returns Z_OK on success, Z_STREAM_ERROR if the source stream
state was inconsistent or if a parameter was invalid, or Z_BUF_ERROR if
@@ -739,11 +739,11 @@ ZEXTERN int ZEXPORT deflateParams OF((z_streamp strm,
retried with more output space.
*/
-ZEXTERN int ZEXPORT deflateTune OF((z_streamp strm,
- int good_length,
- int max_lazy,
- int nice_length,
- int max_chain));
+ZEXTERN int ZEXPORT deflateTune(z_streamp strm,
+ int good_length,
+ int max_lazy,
+ int nice_length,
+ int max_chain);
/*
Fine tune deflate's internal compression parameters. This should only be
used by someone who understands the algorithm used by zlib's deflate for
@@ -756,8 +756,8 @@ ZEXTERN int ZEXPORT deflateTune OF((z_streamp strm,
returns Z_OK on success, or Z_STREAM_ERROR for an invalid deflate stream.
*/
-ZEXTERN uLong ZEXPORT deflateBound OF((z_streamp strm,
- uLong sourceLen));
+ZEXTERN uLong ZEXPORT deflateBound(z_streamp strm,
+ uLong sourceLen);
/*
deflateBound() returns an upper bound on the compressed size after
deflation of sourceLen bytes. It must be called after deflateInit() or
@@ -771,9 +771,9 @@ ZEXTERN uLong ZEXPORT deflateBound OF((z_streamp strm,
than Z_FINISH or Z_NO_FLUSH are used.
*/
-ZEXTERN int ZEXPORT deflatePending OF((z_streamp strm,
- unsigned *pending,
- int *bits));
+ZEXTERN int ZEXPORT deflatePending(z_streamp strm,
+ unsigned *pending,
+ int *bits);
/*
deflatePending() returns the number of bytes and bits of output that have
been generated, but not yet provided in the available output. The bytes not
@@ -786,9 +786,9 @@ ZEXTERN int ZEXPORT deflatePending OF((z_streamp strm,
stream state was inconsistent.
*/
-ZEXTERN int ZEXPORT deflatePrime OF((z_streamp strm,
- int bits,
- int value));
+ZEXTERN int ZEXPORT deflatePrime(z_streamp strm,
+ int bits,
+ int value);
/*
deflatePrime() inserts bits in the deflate output stream. The intent
is that this function is used to start off the deflate output with the bits
@@ -803,8 +803,8 @@ ZEXTERN int ZEXPORT deflatePrime OF((z_streamp strm,
source stream state was inconsistent.
*/
-ZEXTERN int ZEXPORT deflateSetHeader OF((z_streamp strm,
- gz_headerp head));
+ZEXTERN int ZEXPORT deflateSetHeader(z_streamp strm,
+ gz_headerp head);
/*
deflateSetHeader() provides gzip header information for when a gzip
stream is requested by deflateInit2(). deflateSetHeader() may be called
@@ -828,8 +828,8 @@ ZEXTERN int ZEXPORT deflateSetHeader OF((z_streamp strm,
*/
/*
-ZEXTERN int ZEXPORT inflateInit2 OF((z_streamp strm,
- int windowBits));
+ZEXTERN int ZEXPORT inflateInit2(z_streamp strm,
+ int windowBits);
This is another version of inflateInit with an extra parameter. The
fields next_in, avail_in, zalloc, zfree and opaque must be initialized
@@ -882,9 +882,9 @@ ZEXTERN int ZEXPORT inflateInit2 OF((z_streamp strm,
deferred until inflate() is called.
*/
-ZEXTERN int ZEXPORT inflateSetDictionary OF((z_streamp strm,
- const Bytef *dictionary,
- uInt dictLength));
+ZEXTERN int ZEXPORT inflateSetDictionary(z_streamp strm,
+ const Bytef *dictionary,
+ uInt dictLength);
/*
Initializes the decompression dictionary from the given uncompressed byte
sequence. This function must be called immediately after a call of inflate,
@@ -905,9 +905,9 @@ ZEXTERN int ZEXPORT inflateSetDictionary OF((z_streamp strm,
inflate().
*/
-ZEXTERN int ZEXPORT inflateGetDictionary OF((z_streamp strm,
- Bytef *dictionary,
- uInt *dictLength));
+ZEXTERN int ZEXPORT inflateGetDictionary(z_streamp strm,
+ Bytef *dictionary,
+ uInt *dictLength);
/*
Returns the sliding dictionary being maintained by inflate. dictLength is
set to the number of bytes in the dictionary, and that many bytes are copied
@@ -920,7 +920,7 @@ ZEXTERN int ZEXPORT inflateGetDictionary OF((z_streamp strm,
stream state is inconsistent.
*/
-ZEXTERN int ZEXPORT inflateSync OF((z_streamp strm));
+ZEXTERN int ZEXPORT inflateSync(z_streamp strm);
/*
Skips invalid compressed data until a possible full flush point (see above
for the description of deflate with Z_FULL_FLUSH) can be found, or until all
@@ -939,8 +939,8 @@ ZEXTERN int ZEXPORT inflateSync OF((z_streamp strm));
input each time, until success or end of the input data.
*/
-ZEXTERN int ZEXPORT inflateCopy OF((z_streamp dest,
- z_streamp source));
+ZEXTERN int ZEXPORT inflateCopy(z_streamp dest,
+ z_streamp source);
/*
Sets the destination stream as a complete copy of the source stream.
@@ -955,7 +955,7 @@ ZEXTERN int ZEXPORT inflateCopy OF((z_streamp dest,
destination.
*/
-ZEXTERN int ZEXPORT inflateReset OF((z_streamp strm));
+ZEXTERN int ZEXPORT inflateReset(z_streamp strm);
/*
This function is equivalent to inflateEnd followed by inflateInit,
but does not free and reallocate the internal decompression state. The
@@ -965,8 +965,8 @@ ZEXTERN int ZEXPORT inflateReset OF((z_streamp strm));
stream state was inconsistent (such as zalloc or state being Z_NULL).
*/
-ZEXTERN int ZEXPORT inflateReset2 OF((z_streamp strm,
- int windowBits));
+ZEXTERN int ZEXPORT inflateReset2(z_streamp strm,
+ int windowBits);
/*
This function is the same as inflateReset, but it also permits changing
the wrap and window size requests. The windowBits parameter is interpreted
@@ -979,9 +979,9 @@ ZEXTERN int ZEXPORT inflateReset2 OF((z_streamp strm,
the windowBits parameter is invalid.
*/
-ZEXTERN int ZEXPORT inflatePrime OF((z_streamp strm,
- int bits,
- int value));
+ZEXTERN int ZEXPORT inflatePrime(z_streamp strm,
+ int bits,
+ int value);
/*
This function inserts bits in the inflate input stream. The intent is
that this function is used to start inflating at a bit position in the
@@ -1000,7 +1000,7 @@ ZEXTERN int ZEXPORT inflatePrime OF((z_streamp strm,
stream state was inconsistent.
*/
-ZEXTERN long ZEXPORT inflateMark OF((z_streamp strm));
+ZEXTERN long ZEXPORT inflateMark(z_streamp strm);
/*
This function returns two values, one in the lower 16 bits of the return
value, and the other in the remaining upper bits, obtained by shifting the
@@ -1028,8 +1028,8 @@ ZEXTERN long ZEXPORT inflateMark OF((z_streamp strm));
source stream state was inconsistent.
*/
-ZEXTERN int ZEXPORT inflateGetHeader OF((z_streamp strm,
- gz_headerp head));
+ZEXTERN int ZEXPORT inflateGetHeader(z_streamp strm,
+ gz_headerp head);
/*
inflateGetHeader() requests that gzip header information be stored in the
provided gz_header structure. inflateGetHeader() may be called after
@@ -1069,8 +1069,8 @@ ZEXTERN int ZEXPORT inflateGetHeader OF((z_streamp strm,
*/
/*
-ZEXTERN int ZEXPORT inflateBackInit OF((z_streamp strm, int windowBits,
- unsigned char FAR *window));
+ZEXTERN int ZEXPORT inflateBackInit(z_streamp strm, int windowBits,
+ unsigned char FAR *window);
Initialize the internal stream state for decompression using inflateBack()
calls. The fields zalloc, zfree and opaque in strm must be initialized
@@ -1090,13 +1090,13 @@ ZEXTERN int ZEXPORT inflateBackInit OF((z_streamp strm, int windowBits,
the version of the header file.
*/
-typedef unsigned (*in_func) OF((void FAR *,
- z_const unsigned char FAR * FAR *));
-typedef int (*out_func) OF((void FAR *, unsigned char FAR *, unsigned));
+typedef unsigned (*in_func)(void FAR *,
+ z_const unsigned char FAR * FAR *);
+typedef int (*out_func)(void FAR *, unsigned char FAR *, unsigned);
-ZEXTERN int ZEXPORT inflateBack OF((z_streamp strm,
- in_func in, void FAR *in_desc,
- out_func out, void FAR *out_desc));
+ZEXTERN int ZEXPORT inflateBack(z_streamp strm,
+ in_func in, void FAR *in_desc,
+ out_func out, void FAR *out_desc);
/*
inflateBack() does a raw inflate with a single call using a call-back
interface for input and output. This is potentially more efficient than
@@ -1164,7 +1164,7 @@ ZEXTERN int ZEXPORT inflateBack OF((z_streamp strm,
cannot return Z_OK.
*/
-ZEXTERN int ZEXPORT inflateBackEnd OF((z_streamp strm));
+ZEXTERN int ZEXPORT inflateBackEnd(z_streamp strm);
/*
All memory allocated by inflateBackInit() is freed.
@@ -1172,7 +1172,7 @@ ZEXTERN int ZEXPORT inflateBackEnd OF((z_streamp strm));
state was inconsistent.
*/
-ZEXTERN uLong ZEXPORT zlibCompileFlags OF((void));
+ZEXTERN uLong ZEXPORT zlibCompileFlags(void);
/* Return flags indicating compile-time options.
Type sizes, two bits each, 00 = 16 bits, 01 = 32, 10 = 64, 11 = other:
@@ -1225,8 +1225,8 @@ ZEXTERN uLong ZEXPORT zlibCompileFlags OF((void));
you need special options.
*/
-ZEXTERN int ZEXPORT compress OF((Bytef *dest, uLongf *destLen,
- const Bytef *source, uLong sourceLen));
+ZEXTERN int ZEXPORT compress(Bytef *dest, uLongf *destLen,
+ const Bytef *source, uLong sourceLen);
/*
Compresses the source buffer into the destination buffer. sourceLen is
the byte length of the source buffer. Upon entry, destLen is the total size
@@ -1240,9 +1240,9 @@ ZEXTERN int ZEXPORT compress OF((Bytef *dest, uLongf *destLen,
buffer.
*/
-ZEXTERN int ZEXPORT compress2 OF((Bytef *dest, uLongf *destLen,
- const Bytef *source, uLong sourceLen,
- int level));
+ZEXTERN int ZEXPORT compress2(Bytef *dest, uLongf *destLen,
+ const Bytef *source, uLong sourceLen,
+ int level);
/*
Compresses the source buffer into the destination buffer. The level
parameter has the same meaning as in deflateInit. sourceLen is the byte
@@ -1256,15 +1256,15 @@ ZEXTERN int ZEXPORT compress2 OF((Bytef *dest, uLongf *destLen,
Z_STREAM_ERROR if the level parameter is invalid.
*/
-ZEXTERN uLong ZEXPORT compressBound OF((uLong sourceLen));
+ZEXTERN uLong ZEXPORT compressBound(uLong sourceLen);
/*
compressBound() returns an upper bound on the compressed size after
compress() or compress2() on sourceLen bytes. It would be used before a
compress() or compress2() call to allocate the destination buffer.
*/
-ZEXTERN int ZEXPORT uncompress OF((Bytef *dest, uLongf *destLen,
- const Bytef *source, uLong sourceLen));
+ZEXTERN int ZEXPORT uncompress(Bytef *dest, uLongf *destLen,
+ const Bytef *source, uLong sourceLen);
/*
Decompresses the source buffer into the destination buffer. sourceLen is
the byte length of the source buffer. Upon entry, destLen is the total size
@@ -1281,8 +1281,8 @@ ZEXTERN int ZEXPORT uncompress OF((Bytef *dest, uLongf *destLen,
buffer with the uncompressed data up to that point.
*/
-ZEXTERN int ZEXPORT uncompress2 OF((Bytef *dest, uLongf *destLen,
- const Bytef *source, uLong *sourceLen));
+ZEXTERN int ZEXPORT uncompress2(Bytef *dest, uLongf *destLen,
+ const Bytef *source, uLong *sourceLen);
/*
Same as uncompress, except that sourceLen is a pointer, where the
length of the source is *sourceLen. On return, *sourceLen is the number of
@@ -1301,7 +1301,7 @@ ZEXTERN int ZEXPORT uncompress2 OF((Bytef *dest, uLongf *destLen,
typedef struct gzFile_s *gzFile; /* semi-opaque gzip file descriptor */
/*
-ZEXTERN gzFile ZEXPORT gzopen OF((const char *path, const char *mode));
+ZEXTERN gzFile ZEXPORT gzopen(const char *path, const char *mode);
Open the gzip (.gz) file at path for reading and decompressing, or
compressing and writing. The mode parameter is as in fopen ("rb" or "wb")
@@ -1338,7 +1338,7 @@ ZEXTERN gzFile ZEXPORT gzopen OF((const char *path, const char *mode));
file could not be opened.
*/
-ZEXTERN gzFile ZEXPORT gzdopen OF((int fd, const char *mode));
+ZEXTERN gzFile ZEXPORT gzdopen(int fd, const char *mode);
/*
Associate a gzFile with the file descriptor fd. File descriptors are
obtained from calls like open, dup, creat, pipe or fileno (if the file has
@@ -1361,7 +1361,7 @@ ZEXTERN gzFile ZEXPORT gzdopen OF((int fd, const char *mode));
will not detect if fd is invalid (unless fd is -1).
*/
-ZEXTERN int ZEXPORT gzbuffer OF((gzFile file, unsigned size));
+ZEXTERN int ZEXPORT gzbuffer(gzFile file, unsigned size);
/*
Set the internal buffer size used by this library's functions for file to
size. The default buffer size is 8192 bytes. This function must be called
@@ -1377,7 +1377,7 @@ ZEXTERN int ZEXPORT gzbuffer OF((gzFile file, unsigned size));
too late.
*/
-ZEXTERN int ZEXPORT gzsetparams OF((gzFile file, int level, int strategy));
+ZEXTERN int ZEXPORT gzsetparams(gzFile file, int level, int strategy);
/*
Dynamically update the compression level and strategy for file. See the
description of deflateInit2 for the meaning of these parameters. Previously
@@ -1388,7 +1388,7 @@ ZEXTERN int ZEXPORT gzsetparams OF((gzFile file, int level, int strategy));
or Z_MEM_ERROR if there is a memory allocation error.
*/
-ZEXTERN int ZEXPORT gzread OF((gzFile file, voidp buf, unsigned len));
+ZEXTERN int ZEXPORT gzread(gzFile file, voidp buf, unsigned len);
/*
Read and decompress up to len uncompressed bytes from file into buf. If
the input file is not in gzip format, gzread copies the given number of
@@ -1418,8 +1418,8 @@ ZEXTERN int ZEXPORT gzread OF((gzFile file, voidp buf, unsigned len));
Z_STREAM_ERROR.
*/
-ZEXTERN z_size_t ZEXPORT gzfread OF((voidp buf, z_size_t size, z_size_t nitems,
- gzFile file));
+ZEXTERN z_size_t ZEXPORT gzfread(voidp buf, z_size_t size, z_size_t nitems,
+ gzFile file);
/*
Read and decompress up to nitems items of size size from file into buf,
otherwise operating as gzread() does. This duplicates the interface of
@@ -1444,14 +1444,14 @@ ZEXTERN z_size_t ZEXPORT gzfread OF((voidp buf, z_size_t size, z_size_t nitems,
file, resetting and retrying on end-of-file, when size is not 1.
*/
-ZEXTERN int ZEXPORT gzwrite OF((gzFile file, voidpc buf, unsigned len));
+ZEXTERN int ZEXPORT gzwrite(gzFile file, voidpc buf, unsigned len);
/*
Compress and write the len uncompressed bytes at buf to file. gzwrite
returns the number of uncompressed bytes written or 0 in case of error.
*/
-ZEXTERN z_size_t ZEXPORT gzfwrite OF((voidpc buf, z_size_t size,
- z_size_t nitems, gzFile file));
+ZEXTERN z_size_t ZEXPORT gzfwrite(voidpc buf, z_size_t size,
+ z_size_t nitems, gzFile file);
/*
Compress and write nitems items of size size from buf to file, duplicating
the interface of stdio's fwrite(), with size_t request and return types. If
@@ -1464,7 +1464,7 @@ ZEXTERN z_size_t ZEXPORT gzfwrite OF((voidpc buf, z_size_t size,
is returned, and the error state is set to Z_STREAM_ERROR.
*/
-ZEXTERN int ZEXPORTVA gzprintf Z_ARG((gzFile file, const char *format, ...));
+ZEXTERN int ZEXPORTVA gzprintf(gzFile file, const char *format, ...);
/*
Convert, format, compress, and write the arguments (...) to file under
control of the string format, as in fprintf. gzprintf returns the number of
@@ -1479,7 +1479,7 @@ ZEXTERN int ZEXPORTVA gzprintf Z_ARG((gzFile file, const char *format, ...));
This can be determined using zlibCompileFlags().
*/
-ZEXTERN int ZEXPORT gzputs OF((gzFile file, const char *s));
+ZEXTERN int ZEXPORT gzputs(gzFile file, const char *s);
/*
Compress and write the given null-terminated string s to file, excluding
the terminating null character.
@@ -1487,7 +1487,7 @@ ZEXTERN int ZEXPORT gzputs OF((gzFile file, const char *s));
gzputs returns the number of characters written, or -1 in case of error.
*/
-ZEXTERN char * ZEXPORT gzgets OF((gzFile file, char *buf, int len));
+ZEXTERN char * ZEXPORT gzgets(gzFile file, char *buf, int len);
/*
Read and decompress bytes from file into buf, until len-1 characters are
read, or until a newline character is read and transferred to buf, or an
@@ -1501,13 +1501,13 @@ ZEXTERN char * ZEXPORT gzgets OF((gzFile file, char *buf, int len));
buf are indeterminate.
*/
-ZEXTERN int ZEXPORT gzputc OF((gzFile file, int c));
+ZEXTERN int ZEXPORT gzputc(gzFile file, int c);
/*
Compress and write c, converted to an unsigned char, into file. gzputc
returns the value that was written, or -1 in case of error.
*/
-ZEXTERN int ZEXPORT gzgetc OF((gzFile file));
+ZEXTERN int ZEXPORT gzgetc(gzFile file);
/*
Read and decompress one byte from file. gzgetc returns this byte or -1
in case of end of file or error. This is implemented as a macro for speed.
@@ -1516,7 +1516,7 @@ ZEXTERN int ZEXPORT gzgetc OF((gzFile file));
points to has been clobbered or not.
*/
-ZEXTERN int ZEXPORT gzungetc OF((int c, gzFile file));
+ZEXTERN int ZEXPORT gzungetc(int c, gzFile file);
/*
Push c back onto the stream for file to be read as the first character on
the next read. At least one character of push-back is always allowed.
@@ -1528,7 +1528,7 @@ ZEXTERN int ZEXPORT gzungetc OF((int c, gzFile file));
gzseek() or gzrewind().
*/
-ZEXTERN int ZEXPORT gzflush OF((gzFile file, int flush));
+ZEXTERN int ZEXPORT gzflush(gzFile file, int flush);
/*
Flush all pending output to file. The parameter flush is as in the
deflate() function. The return value is the zlib error number (see function
@@ -1544,8 +1544,8 @@ ZEXTERN int ZEXPORT gzflush OF((gzFile file, int flush));
*/
/*
-ZEXTERN z_off_t ZEXPORT gzseek OF((gzFile file,
- z_off_t offset, int whence));
+ZEXTERN z_off_t ZEXPORT gzseek(gzFile file,
+ z_off_t offset, int whence);
Set the starting position to offset relative to whence for the next gzread
or gzwrite on file. The offset represents a number of bytes in the
@@ -1563,7 +1563,7 @@ ZEXTERN z_off_t ZEXPORT gzseek OF((gzFile file,
would be before the current position.
*/
-ZEXTERN int ZEXPORT gzrewind OF((gzFile file));
+ZEXTERN int ZEXPORT gzrewind(gzFile file);
/*
Rewind file. This function is supported only for reading.
@@ -1571,7 +1571,7 @@ ZEXTERN int ZEXPORT gzrewind OF((gzFile file));
*/
/*
-ZEXTERN z_off_t ZEXPORT gztell OF((gzFile file));
+ZEXTERN z_off_t ZEXPORT gztell(gzFile file);
Return the starting position for the next gzread or gzwrite on file.
This position represents a number of bytes in the uncompressed data stream,
@@ -1582,7 +1582,7 @@ ZEXTERN z_off_t ZEXPORT gztell OF((gzFile file));
*/
/*
-ZEXTERN z_off_t ZEXPORT gzoffset OF((gzFile file));
+ZEXTERN z_off_t ZEXPORT gzoffset(gzFile file);
Return the current compressed (actual) read or write offset of file. This
offset includes the count of bytes that precede the gzip stream, for example
@@ -1591,7 +1591,7 @@ ZEXTERN z_off_t ZEXPORT gzoffset OF((gzFile file));
be used for a progress indicator. On error, gzoffset() returns -1.
*/
-ZEXTERN int ZEXPORT gzeof OF((gzFile file));
+ZEXTERN int ZEXPORT gzeof(gzFile file);
/*
Return true (1) if the end-of-file indicator for file has been set while
reading, false (0) otherwise. Note that the end-of-file indicator is set
@@ -1606,7 +1606,7 @@ ZEXTERN int ZEXPORT gzeof OF((gzFile file));
has grown since the previous end of file was detected.
*/
-ZEXTERN int ZEXPORT gzdirect OF((gzFile file));
+ZEXTERN int ZEXPORT gzdirect(gzFile file);
/*
Return true (1) if file is being copied directly while reading, or false
(0) if file is a gzip stream being decompressed.
@@ -1627,7 +1627,7 @@ ZEXTERN int ZEXPORT gzdirect OF((gzFile file));
gzip file reading and decompression, which may not be desired.)
*/
-ZEXTERN int ZEXPORT gzclose OF((gzFile file));
+ZEXTERN int ZEXPORT gzclose(gzFile file);
/*
Flush all pending output for file, if necessary, close file and
deallocate the (de)compression state. Note that once file is closed, you
@@ -1640,8 +1640,8 @@ ZEXTERN int ZEXPORT gzclose OF((gzFile file));
last read ended in the middle of a gzip stream, or Z_OK on success.
*/
-ZEXTERN int ZEXPORT gzclose_r OF((gzFile file));
-ZEXTERN int ZEXPORT gzclose_w OF((gzFile file));
+ZEXTERN int ZEXPORT gzclose_r(gzFile file);
+ZEXTERN int ZEXPORT gzclose_w(gzFile file);
/*
Same as gzclose(), but gzclose_r() is only for use when reading, and
gzclose_w() is only for use when writing or appending. The advantage to
@@ -1652,7 +1652,7 @@ ZEXTERN int ZEXPORT gzclose_w OF((gzFile file));
zlib library.
*/
-ZEXTERN const char * ZEXPORT gzerror OF((gzFile file, int *errnum));
+ZEXTERN const char * ZEXPORT gzerror(gzFile file, int *errnum);
/*
Return the error message for the last error which occurred on file.
errnum is set to zlib error number. If an error occurred in the file system
@@ -1668,7 +1668,7 @@ ZEXTERN const char * ZEXPORT gzerror OF((gzFile file, int *errnum));
functions above that do not distinguish those cases in their return values.
*/
-ZEXTERN void ZEXPORT gzclearerr OF((gzFile file));
+ZEXTERN void ZEXPORT gzclearerr(gzFile file);
/*
Clear the error and end-of-file flags for file. This is analogous to the
clearerr() function in stdio. This is useful for continuing to read a gzip
@@ -1685,7 +1685,7 @@ ZEXTERN void ZEXPORT gzclearerr OF((gzFile file));
library.
*/
-ZEXTERN uLong ZEXPORT adler32 OF((uLong adler, const Bytef *buf, uInt len));
+ZEXTERN uLong ZEXPORT adler32(uLong adler, const Bytef *buf, uInt len);
/*
Update a running Adler-32 checksum with the bytes buf[0..len-1] and
return the updated checksum. An Adler-32 value is in the range of a 32-bit
@@ -1705,15 +1705,15 @@ ZEXTERN uLong ZEXPORT adler32 OF((uLong adler, const Bytef *buf, uInt len));
if (adler != original_adler) error();
*/
-ZEXTERN uLong ZEXPORT adler32_z OF((uLong adler, const Bytef *buf,
- z_size_t len));
+ZEXTERN uLong ZEXPORT adler32_z(uLong adler, const Bytef *buf,
+ z_size_t len);
/*
Same as adler32(), but with a size_t length.
*/
/*
-ZEXTERN uLong ZEXPORT adler32_combine OF((uLong adler1, uLong adler2,
- z_off_t len2));
+ZEXTERN uLong ZEXPORT adler32_combine(uLong adler1, uLong adler2,
+ z_off_t len2);
Combine two Adler-32 checksums into one. For two sequences of bytes, seq1
and seq2 with lengths len1 and len2, Adler-32 checksums were calculated for
@@ -1723,7 +1723,7 @@ ZEXTERN uLong ZEXPORT adler32_combine OF((uLong adler1, uLong adler2,
negative, the result has no meaning or utility.
*/
-ZEXTERN uLong ZEXPORT crc32 OF((uLong crc, const Bytef *buf, uInt len));
+ZEXTERN uLong ZEXPORT crc32(uLong crc, const Bytef *buf, uInt len);
/*
Update a running CRC-32 with the bytes buf[0..len-1] and return the
updated CRC-32. A CRC-32 value is in the range of a 32-bit unsigned integer.
@@ -1741,14 +1741,14 @@ ZEXTERN uLong ZEXPORT crc32 OF((uLong crc, const Bytef *buf, uInt len));
if (crc != original_crc) error();
*/
-ZEXTERN uLong ZEXPORT crc32_z OF((uLong crc, const Bytef *buf,
- z_size_t len));
+ZEXTERN uLong ZEXPORT crc32_z(uLong crc, const Bytef *buf,
+ z_size_t len);
/*
Same as crc32(), but with a size_t length.
*/
/*
-ZEXTERN uLong ZEXPORT crc32_combine OF((uLong crc1, uLong crc2, z_off_t len2));
+ZEXTERN uLong ZEXPORT crc32_combine(uLong crc1, uLong crc2, z_off_t len2);
Combine two CRC-32 check values into one. For two sequences of bytes,
seq1 and seq2 with lengths len1 and len2, CRC-32 check values were
@@ -1758,13 +1758,13 @@ ZEXTERN uLong ZEXPORT crc32_combine OF((uLong crc1, uLong crc2, z_off_t len2));
*/
/*
-ZEXTERN uLong ZEXPORT crc32_combine_gen OF((z_off_t len2));
+ZEXTERN uLong ZEXPORT crc32_combine_gen(z_off_t len2);
Return the operator corresponding to length len2, to be used with
crc32_combine_op().
*/
-ZEXTERN uLong ZEXPORT crc32_combine_op OF((uLong crc1, uLong crc2, uLong op));
+ZEXTERN uLong ZEXPORT crc32_combine_op(uLong crc1, uLong crc2, uLong op);
/*
Give the same result as crc32_combine(), using op in place of len2. op is
is generated from len2 by crc32_combine_gen(). This will be faster than
@@ -1777,20 +1777,20 @@ ZEXTERN uLong ZEXPORT crc32_combine_op OF((uLong crc1, uLong crc2, uLong op));
/* deflateInit and inflateInit are macros to allow checking the zlib version
* and the compiler's view of z_stream:
*/
-ZEXTERN int ZEXPORT deflateInit_ OF((z_streamp strm, int level,
- const char *version, int stream_size));
-ZEXTERN int ZEXPORT inflateInit_ OF((z_streamp strm,
- const char *version, int stream_size));
-ZEXTERN int ZEXPORT deflateInit2_ OF((z_streamp strm, int level, int method,
- int windowBits, int memLevel,
- int strategy, const char *version,
- int stream_size));
-ZEXTERN int ZEXPORT inflateInit2_ OF((z_streamp strm, int windowBits,
- const char *version, int stream_size));
-ZEXTERN int ZEXPORT inflateBackInit_ OF((z_streamp strm, int windowBits,
- unsigned char FAR *window,
- const char *version,
- int stream_size));
+ZEXTERN int ZEXPORT deflateInit_(z_streamp strm, int level,
+ const char *version, int stream_size);
+ZEXTERN int ZEXPORT inflateInit_(z_streamp strm,
+ const char *version, int stream_size);
+ZEXTERN int ZEXPORT deflateInit2_(z_streamp strm, int level, int method,
+ int windowBits, int memLevel,
+ int strategy, const char *version,
+ int stream_size);
+ZEXTERN int ZEXPORT inflateInit2_(z_streamp strm, int windowBits,
+ const char *version, int stream_size);
+ZEXTERN int ZEXPORT inflateBackInit_(z_streamp strm, int windowBits,
+ unsigned char FAR *window,
+ const char *version,
+ int stream_size);
#ifdef Z_PREFIX_SET
# define z_deflateInit(strm, level) \
deflateInit_((strm), (level), ZLIB_VERSION, (int)sizeof(z_stream))
@@ -1835,7 +1835,7 @@ struct gzFile_s {
unsigned char *next;
z_off64_t pos;
};
-ZEXTERN int ZEXPORT gzgetc_ OF((gzFile file)); /* backward compatibility */
+ZEXTERN int ZEXPORT gzgetc_(gzFile file); /* backward compatibility */
#ifdef Z_PREFIX_SET
# undef z_gzgetc
# define z_gzgetc(g) \
@@ -1857,13 +1857,13 @@ ZEXTERN int ZEXPORT gzgetc_ OF((gzFile file)); /* backward compatibility */
* without large file support, _LFS64_LARGEFILE must also be true
*/
#ifdef Z_LARGE64
- ZEXTERN gzFile ZEXPORT gzopen64 OF((const char *, const char *));
- ZEXTERN z_off64_t ZEXPORT gzseek64 OF((gzFile, z_off64_t, int));
- ZEXTERN z_off64_t ZEXPORT gztell64 OF((gzFile));
- ZEXTERN z_off64_t ZEXPORT gzoffset64 OF((gzFile));
- ZEXTERN uLong ZEXPORT adler32_combine64 OF((uLong, uLong, z_off64_t));
- ZEXTERN uLong ZEXPORT crc32_combine64 OF((uLong, uLong, z_off64_t));
- ZEXTERN uLong ZEXPORT crc32_combine_gen64 OF((z_off64_t));
+ ZEXTERN gzFile ZEXPORT gzopen64(const char *, const char *);
+ ZEXTERN z_off64_t ZEXPORT gzseek64(gzFile, z_off64_t, int);
+ ZEXTERN z_off64_t ZEXPORT gztell64(gzFile);
+ ZEXTERN z_off64_t ZEXPORT gzoffset64(gzFile);
+ ZEXTERN uLong ZEXPORT adler32_combine64(uLong, uLong, z_off64_t);
+ ZEXTERN uLong ZEXPORT crc32_combine64(uLong, uLong, z_off64_t);
+ ZEXTERN uLong ZEXPORT crc32_combine_gen64(z_off64_t);
#endif
#if !defined(ZLIB_INTERNAL) && defined(Z_WANT64)
@@ -1910,50 +1910,50 @@ ZEXTERN int ZEXPORT gzgetc_ OF((gzFile file)); /* backward compatibility */
# endif
# endif
# ifndef Z_LARGE64
- ZEXTERN gzFile ZEXPORT gzopen64 OF((const char *, const char *));
- ZEXTERN z_off_t ZEXPORT gzseek64 OF((gzFile, z_off_t, int));
- ZEXTERN z_off_t ZEXPORT gztell64 OF((gzFile));
- ZEXTERN z_off_t ZEXPORT gzoffset64 OF((gzFile));
- ZEXTERN uLong ZEXPORT adler32_combine64 OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine64 OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine_gen64 OF((z_off_t));
+ ZEXTERN gzFile ZEXPORT gzopen64(const char *, const char *);
+ ZEXTERN z_off_t ZEXPORT gzseek64(gzFile, z_off_t, int);
+ ZEXTERN z_off_t ZEXPORT gztell64(gzFile);
+ ZEXTERN z_off_t ZEXPORT gzoffset64(gzFile);
+ ZEXTERN uLong ZEXPORT adler32_combine64(uLong, uLong, z_off_t);
+ ZEXTERN uLong ZEXPORT crc32_combine64(uLong, uLong, z_off_t);
+ ZEXTERN uLong ZEXPORT crc32_combine_gen64(z_off_t);
# endif
#else
- ZEXTERN gzFile ZEXPORT gzopen OF((const char *, const char *));
- ZEXTERN z_off_t ZEXPORT gzseek OF((gzFile, z_off_t, int));
- ZEXTERN z_off_t ZEXPORT gztell OF((gzFile));
- ZEXTERN z_off_t ZEXPORT gzoffset OF((gzFile));
- ZEXTERN uLong ZEXPORT adler32_combine OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine_gen OF((z_off_t));
+ ZEXTERN gzFile ZEXPORT gzopen(const char *, const char *);
+ ZEXTERN z_off_t ZEXPORT gzseek(gzFile, z_off_t, int);
+ ZEXTERN z_off_t ZEXPORT gztell(gzFile);
+ ZEXTERN z_off_t ZEXPORT gzoffset(gzFile);
+ ZEXTERN uLong ZEXPORT adler32_combine(uLong, uLong, z_off_t);
+ ZEXTERN uLong ZEXPORT crc32_combine(uLong, uLong, z_off_t);
+ ZEXTERN uLong ZEXPORT crc32_combine_gen(z_off_t);
#endif
#else /* Z_SOLO */
- ZEXTERN uLong ZEXPORT adler32_combine OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine_gen OF((z_off_t));
+ ZEXTERN uLong ZEXPORT adler32_combine(uLong, uLong, z_off_t);
+ ZEXTERN uLong ZEXPORT crc32_combine(uLong, uLong, z_off_t);
+ ZEXTERN uLong ZEXPORT crc32_combine_gen(z_off_t);
#endif /* !Z_SOLO */
/* undocumented functions */
-ZEXTERN const char * ZEXPORT zError OF((int));
-ZEXTERN int ZEXPORT inflateSyncPoint OF((z_streamp));
-ZEXTERN const z_crc_t FAR * ZEXPORT get_crc_table OF((void));
-ZEXTERN int ZEXPORT inflateUndermine OF((z_streamp, int));
-ZEXTERN int ZEXPORT inflateValidate OF((z_streamp, int));
-ZEXTERN unsigned long ZEXPORT inflateCodesUsed OF((z_streamp));
-ZEXTERN int ZEXPORT inflateResetKeep OF((z_streamp));
-ZEXTERN int ZEXPORT deflateResetKeep OF((z_streamp));
+ZEXTERN const char * ZEXPORT zError(int);
+ZEXTERN int ZEXPORT inflateSyncPoint(z_streamp);
+ZEXTERN const z_crc_t FAR * ZEXPORT get_crc_table(void);
+ZEXTERN int ZEXPORT inflateUndermine(z_streamp, int);
+ZEXTERN int ZEXPORT inflateValidate(z_streamp, int);
+ZEXTERN unsigned long ZEXPORT inflateCodesUsed(z_streamp);
+ZEXTERN int ZEXPORT inflateResetKeep(z_streamp);
+ZEXTERN int ZEXPORT deflateResetKeep(z_streamp);
#if defined(_WIN32) && !defined(Z_SOLO)
-ZEXTERN gzFile ZEXPORT gzopen_w OF((const wchar_t *path,
- const char *mode));
+ZEXTERN gzFile ZEXPORT gzopen_w(const wchar_t *path,
+ const char *mode);
#endif
#if defined(STDC) || defined(Z_HAVE_STDARG_H)
# ifndef Z_SOLO
-ZEXTERN int ZEXPORTVA gzvprintf Z_ARG((gzFile file,
- const char *format,
- va_list va));
+ZEXTERN int ZEXPORTVA gzvprintf(gzFile file,
+ const char *format,
+ va_list va);
# endif
#endif
diff --git a/deps/zlib/zutil.c b/deps/zlib/zutil.c
index 9543ae825e3250..b1c5d2d3c6daf5 100644
--- a/deps/zlib/zutil.c
+++ b/deps/zlib/zutil.c
@@ -24,13 +24,11 @@ z_const char * const z_errmsg[10] = {
};
-const char * ZEXPORT zlibVersion()
-{
+const char * ZEXPORT zlibVersion(void) {
return ZLIB_VERSION;
}
-uLong ZEXPORT zlibCompileFlags()
-{
+uLong ZEXPORT zlibCompileFlags(void) {
uLong flags;
flags = 0;
@@ -121,9 +119,7 @@ uLong ZEXPORT zlibCompileFlags()
# endif
int ZLIB_INTERNAL z_verbose = verbose;
-void ZLIB_INTERNAL z_error(m)
- char *m;
-{
+void ZLIB_INTERNAL z_error(char *m) {
fprintf(stderr, "%s\n", m);
exit(1);
}
@@ -132,9 +128,7 @@ void ZLIB_INTERNAL z_error(m)
/* exported to allow conversion of error code to string for compress() and
* uncompress()
*/
-const char * ZEXPORT zError(err)
- int err;
-{
+const char * ZEXPORT zError(int err) {
return ERR_MSG(err);
}
@@ -148,22 +142,14 @@ const char * ZEXPORT zError(err)
#ifndef HAVE_MEMCPY
-void ZLIB_INTERNAL zmemcpy(dest, source, len)
- Bytef* dest;
- const Bytef* source;
- uInt len;
-{
+void ZLIB_INTERNAL zmemcpy(Bytef* dest, const Bytef* source, uInt len) {
if (len == 0) return;
do {
*dest++ = *source++; /* ??? to be unrolled */
} while (--len != 0);
}
-int ZLIB_INTERNAL zmemcmp(s1, s2, len)
- const Bytef* s1;
- const Bytef* s2;
- uInt len;
-{
+int ZLIB_INTERNAL zmemcmp(const Bytef* s1, const Bytef* s2, uInt len) {
uInt j;
for (j = 0; j < len; j++) {
@@ -172,10 +158,7 @@ int ZLIB_INTERNAL zmemcmp(s1, s2, len)
return 0;
}
-void ZLIB_INTERNAL zmemzero(dest, len)
- Bytef* dest;
- uInt len;
-{
+void ZLIB_INTERNAL zmemzero(Bytef* dest, uInt len) {
if (len == 0) return;
do {
*dest++ = 0; /* ??? to be unrolled */
@@ -216,8 +199,7 @@ local ptr_table table[MAX_PTR];
* a protected system like OS/2. Use Microsoft C instead.
*/
-voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, unsigned items, unsigned size)
-{
+voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, unsigned items, unsigned size) {
voidpf buf;
ulg bsize = (ulg)items*size;
@@ -242,8 +224,7 @@ voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, unsigned items, unsigned size)
return buf;
}
-void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr)
-{
+void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr) {
int n;
(void)opaque;
@@ -279,14 +260,12 @@ void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr)
# define _hfree hfree
#endif
-voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, uInt items, uInt size)
-{
+voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, uInt items, uInt size) {
(void)opaque;
return _halloc((long)items, size);
}
-void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr)
-{
+void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr) {
(void)opaque;
_hfree(ptr);
}
@@ -299,25 +278,18 @@ void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr)
#ifndef MY_ZCALLOC /* Any system without a special alloc function */
#ifndef STDC
-extern voidp malloc OF((uInt size));
-extern voidp calloc OF((uInt items, uInt size));
-extern void free OF((voidpf ptr));
+extern voidp malloc(uInt size);
+extern voidp calloc(uInt items, uInt size);
+extern void free(voidpf ptr);
#endif
-voidpf ZLIB_INTERNAL zcalloc(opaque, items, size)
- voidpf opaque;
- unsigned items;
- unsigned size;
-{
+voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, unsigned items, unsigned size) {
(void)opaque;
return sizeof(uInt) > 2 ? (voidpf)malloc(items * size) :
(voidpf)calloc(items, size);
}
-void ZLIB_INTERNAL zcfree(opaque, ptr)
- voidpf opaque;
- voidpf ptr;
-{
+void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr) {
(void)opaque;
free(ptr);
}
diff --git a/deps/zlib/zutil.h b/deps/zlib/zutil.h
index e0466922244edc..6980a5f4ea3446 100644
--- a/deps/zlib/zutil.h
+++ b/deps/zlib/zutil.h
@@ -206,9 +206,9 @@ extern z_const char * const z_errmsg[10]; /* indexed by 2-zlib_error */
/* provide prototypes for these when building zlib without LFS */
#if !defined(_WIN32) && \
(!defined(_LARGEFILE64_SOURCE) || _LFS64_LARGEFILE-0 == 0)
- ZEXTERN uLong ZEXPORT adler32_combine64 OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine64 OF((uLong, uLong, z_off_t));
- ZEXTERN uLong ZEXPORT crc32_combine_gen64 OF((z_off_t));
+ ZEXTERN uLong ZEXPORT adler32_combine64(uLong, uLong, z_off_t);
+ ZEXTERN uLong ZEXPORT crc32_combine64(uLong, uLong, z_off_t);
+ ZEXTERN uLong ZEXPORT crc32_combine_gen64(z_off_t);
#endif
/* common defaults */
@@ -247,16 +247,16 @@ extern z_const char * const z_errmsg[10]; /* indexed by 2-zlib_error */
# define zmemzero(dest, len) memset(dest, 0, len)
# endif
#else
- void ZLIB_INTERNAL zmemcpy OF((Bytef* dest, const Bytef* source, uInt len));
- int ZLIB_INTERNAL zmemcmp OF((const Bytef* s1, const Bytef* s2, uInt len));
- void ZLIB_INTERNAL zmemzero OF((Bytef* dest, uInt len));
+ void ZLIB_INTERNAL zmemcpy(Bytef* dest, const Bytef* source, uInt len);
+ int ZLIB_INTERNAL zmemcmp(const Bytef* s1, const Bytef* s2, uInt len);
+ void ZLIB_INTERNAL zmemzero(Bytef* dest, uInt len);
#endif
/* Diagnostic functions */
#ifdef ZLIB_DEBUG
# include <stdio.h>
extern int ZLIB_INTERNAL z_verbose;
- extern void ZLIB_INTERNAL z_error OF((char *m));
+ extern void ZLIB_INTERNAL z_error(char *m);
# define Assert(cond,msg) {if(!(cond)) z_error(msg);}
# define Trace(x) {if (z_verbose>=0) fprintf x ;}
# define Tracev(x) {if (z_verbose>0) fprintf x ;}
@@ -273,9 +273,9 @@ extern z_const char * const z_errmsg[10]; /* indexed by 2-zlib_error */
#endif
#ifndef Z_SOLO
- voidpf ZLIB_INTERNAL zcalloc OF((voidpf opaque, unsigned items,
- unsigned size));
- void ZLIB_INTERNAL zcfree OF((voidpf opaque, voidpf ptr));
+ voidpf ZLIB_INTERNAL zcalloc(voidpf opaque, unsigned items,
+ unsigned size);
+ void ZLIB_INTERNAL zcfree(voidpf opaque, voidpf ptr);
#endif
#define ZALLOC(strm, items, size) \
diff --git a/doc/api/buffer.md b/doc/api/buffer.md
index 981ef7aa2d4071..3a46599649d025 100644
--- a/doc/api/buffer.md
+++ b/doc/api/buffer.md
@@ -1118,6 +1118,12 @@ const { Buffer } = require('node:buffer');
const buf = Buffer.from([0x62, 0x75, 0x66, 0x66, 0x65, 0x72]);
```
+If `array` is an `Array`-like object (that is, one with a `length` property of
+type `number`), it is treated as if it is an array, unless it is a `Buffer` or
+a `Uint8Array`. This means all other `TypedArray` variants get treated as an
+`Array`. To create a `Buffer` from the bytes backing a `TypedArray`, use
+[`Buffer.copyBytesFrom()`][].
+
A `TypeError` will be thrown if `array` is not an `Array` or another type
appropriate for `Buffer.from()` variants.
@@ -5490,6 +5496,7 @@ introducing security vulnerabilities into an application.
[`Buffer.allocUnsafe()`]: #static-method-bufferallocunsafesize
[`Buffer.allocUnsafeSlow()`]: #static-method-bufferallocunsafeslowsize
[`Buffer.concat()`]: #static-method-bufferconcatlist-totallength
+[`Buffer.copyBytesFrom()`]: #static-method-buffercopybytesfromview-offset-length
[`Buffer.from(array)`]: #static-method-bufferfromarray
[`Buffer.from(arrayBuf)`]: #static-method-bufferfromarraybuffer-byteoffset-length
[`Buffer.from(buffer)`]: #static-method-bufferfrombuffer
diff --git a/doc/api/errors.md b/doc/api/errors.md
index 09f866fc2869b5..8a212f3c19f1bc 100644
--- a/doc/api/errors.md
+++ b/doc/api/errors.md
@@ -46,15 +46,35 @@ try {
```
Any use of the JavaScript `throw` mechanism will raise an exception that
-_must_ be handled using `try…catch` or the Node.js process will exit
-immediately.
+_must_ be handled or the Node.js process will exit immediately.
With few exceptions, _Synchronous_ APIs (any blocking method that does not
-accept a `callback` function, such as [`fs.readFileSync`][]), will use `throw`
-to report errors.
+return a {Promise} nor accept a `callback` function, such as
+[`fs.readFileSync`][]), will use `throw` to report errors.
Errors that occur within _Asynchronous APIs_ may be reported in multiple ways:
+* Some asynchronous methods return a {Promise}; you should always take into
+ account that it might be rejected. See [`--unhandled-rejections`][] flag for
+ how the process will react to an unhandled promise rejection.
+
+
+
+ ```js
+ const fs = require('fs/promises');
+
+ (async () => {
+ let data;
+ try {
+ data = await fs.readFile('a file that does not exist');
+ } catch (err) {
+ console.error('There was an error reading the file!', err);
+ return;
+ }
+ // Otherwise handle the data
+ })();
+ ```
+
* Most asynchronous methods that accept a `callback` function will accept an
`Error` object passed as the first argument to that function. If that first
argument is not `null` and is an instance of `Error`, then an error occurred
@@ -104,9 +124,9 @@ pass or fail).
For _all_ [`EventEmitter`][] objects, if an `'error'` event handler is not
provided, the error will be thrown, causing the Node.js process to report an
-uncaught exception and crash unless either: The [`domain`][domains] module is
-used appropriately or a handler has been registered for the
-[`'uncaughtException'`][] event.
+uncaught exception and crash unless either: a handler has been registered for
+the [`'uncaughtException'`][] event, or the deprecated [`node:domain`][domains]
+module is used.
```js
const EventEmitter = require('node:events');
@@ -125,60 +145,6 @@ they are thrown _after_ the calling code has already exited.
Developers must refer to the documentation for each method to determine
exactly how errors raised by those methods are propagated.
-### Error-first callbacks
-
-
-
-Most asynchronous methods exposed by the Node.js core API follow an idiomatic
-pattern referred to as an _error-first callback_. With this pattern, a callback
-function is passed to the method as an argument. When the operation either
-completes or an error is raised, the callback function is called with the
-`Error` object (if any) passed as the first argument. If no error was raised,
-the first argument will be passed as `null`.
-
-```js
-const fs = require('node:fs');
-
-function errorFirstCallback(err, data) {
- if (err) {
- console.error('There was an error', err);
- return;
- }
- console.log(data);
-}
-
-fs.readFile('/some/file/that/does-not-exist', errorFirstCallback);
-fs.readFile('/some/file/that/does-exist', errorFirstCallback);
-```
-
-The JavaScript `try…catch` mechanism **cannot** be used to intercept errors
-generated by asynchronous APIs. A common mistake for beginners is to try to
-use `throw` inside an error-first callback:
-
-```js
-// THIS WILL NOT WORK:
-const fs = require('node:fs');
-
-try {
- fs.readFile('/some/file/that/does-not-exist', (err, data) => {
- // Mistaken assumption: throwing here...
- if (err) {
- throw err;
- }
- });
-} catch (err) {
- // This will not catch the throw!
- console.error(err);
-}
-```
-
-This will not work because the callback function passed to `fs.readFile()` is
-called asynchronously. By the time the callback has been called, the
-surrounding code, including the `try…catch` block, will have already exited.
-Throwing an error inside the callback **can crash the Node.js process** in most
-cases. If [domains][] are enabled, or a handler has been registered with
-`process.on('uncaughtException')`, such errors can be intercepted.
-
## Class: `Error`
@@ -1267,6 +1233,23 @@ provided.
Encoding provided to `TextDecoder()` API was not one of the
[WHATWG Supported Encodings][].
+
+
+### `ERR_ESM_LOADER_REGISTRATION_UNAVAILABLE`
+
+
+
+Programmatically registering custom ESM loaders
+currently requires at least one custom loader to have been
+registered via the `--experimental-loader` flag. A no-op
+loader registered via CLI is sufficient
+(for example: `--experimental-loader data:text/javascript,`;
+do not omit the necessary trailing comma).
+A future version of Node.js will support the programmatic
+registration of loaders without needing to also use the flag.
+
### `ERR_EVAL_ESM_CANNOT_PRINT`
@@ -3600,6 +3583,7 @@ The native call from `process.cpuUsage` could not be processed.
[`--disable-proto=throw`]: cli.md#--disable-protomode
[`--force-fips`]: cli.md#--force-fips
[`--no-addons`]: cli.md#--no-addons
+[`--unhandled-rejections`]: cli.md#--unhandled-rejectionsmode
[`Class: assert.AssertionError`]: assert.md#class-assertassertionerror
[`ERR_INVALID_ARG_TYPE`]: #err_invalid_arg_type
[`ERR_MISSING_MESSAGE_PORT_IN_TRANSFER_LIST`]: #err_missing_message_port_in_transfer_list
diff --git a/doc/api/esm.md b/doc/api/esm.md
index 3d450ce3c69310..68ecad93b8ad3b 100644
--- a/doc/api/esm.md
+++ b/doc/api/esm.md
@@ -1225,6 +1225,17 @@ console.log('some module!');
If you run `node --experimental-loader ./import-map-loader.js main.js`
the output will be `some module!`.
+### Register loaders programmatically
+
+
+
+In addition to using the `--experimental-loader` option in the CLI,
+loaders can also be registered programmatically. You can find
+detailed information about this process in the documentation page
+for [`module.register()`][].
+
## Resolution and loading algorithm
### Features
@@ -1599,6 +1610,7 @@ for ESM specifiers is [commonjs-extension-resolution-loader][].
[`import.meta.url`]: #importmetaurl
[`import`]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/import
[`module.createRequire()`]: module.md#modulecreaterequirefilename
+[`module.register()`]: module.md#moduleregister
[`module.syncBuiltinESMExports()`]: module.md#modulesyncbuiltinesmexports
[`package.json`]: packages.md#nodejs-packagejson-field-definitions
[`port.ref()`]: https://nodejs.org/dist/latest-v17.x/docs/api/worker_threads.html#portref
diff --git a/doc/api/events.md b/doc/api/events.md
index e443c2162c8e64..ffeabe2fb53860 100644
--- a/doc/api/events.md
+++ b/doc/api/events.md
@@ -1880,8 +1880,7 @@ same options as `EventEmitter` and `AsyncResource` themselves.
* `captureRejections` {boolean} It enables
[automatic capturing of promise rejection][capturerejections].
**Default:** `false`.
- * `name` {string} The type of async event. **Default::**
- [`new.target.name`][].
+ * `name` {string} The type of async event. **Default:** [`new.target.name`][].
* `triggerAsyncId` {number} The ID of the execution context that created this
async event. **Default:** `executionAsyncId()`.
* `requireManualDestroy` {boolean} If set to `true`, disables `emitDestroy`
diff --git a/doc/api/fs.md b/doc/api/fs.md
index e95865db7fb200..01abd668c499b0 100644
--- a/doc/api/fs.md
+++ b/doc/api/fs.md
@@ -7973,6 +7973,9 @@ string.
* `'r'`: Open file for reading.
An exception occurs if the file does not exist.
+* `'rs'`: Open file for reading in synchronous mode.
+ An exception occurs if the file does not exist.
+
* `'r+'`: Open file for reading and writing.
An exception occurs if the file does not exist.
diff --git a/doc/api/globals.md b/doc/api/globals.md
index dc530b796f68b8..8285064c2e25e4 100644
--- a/doc/api/globals.md
+++ b/doc/api/globals.md
@@ -124,7 +124,7 @@ Returns a new `AbortSignal` which will be aborted in `delay` milliseconds.
#### Static method: `AbortSignal.any(signals)`
* `signals` {AbortSignal\[]} The `AbortSignal`s of which to compose a new `AbortSignal`.
diff --git a/doc/api/module.md b/doc/api/module.md
index d52ec34dd12a54..f3752f3f81a5b2 100644
--- a/doc/api/module.md
+++ b/doc/api/module.md
@@ -80,6 +80,101 @@ isBuiltin('fs'); // true
isBuiltin('wss'); // false
```
+### `module.register()`
+
+
+
+In addition to using the `--experimental-loader` option in the CLI,
+loaders can be registered programmatically using the
+`module.register()` method.
+
+```mjs
+import { register } from 'node:module';
+
+register('http-to-https', import.meta.url);
+
+// Because this is a dynamic `import()`, the `http-to-https` hooks will run
+// before importing `./my-app.mjs`.
+await import('./my-app.mjs');
+```
+
+In the example above, we are registering the `http-to-https` loader,
+but it will only be available for subsequently imported modules—in
+this case, `my-app.mjs`. If the `await import('./my-app.mjs')` had
+instead been a static `import './my-app.mjs'`, _the app would already
+have been loaded_ before the `http-to-https` hooks were
+registered. This is part of the design of ES modules, where static
+imports are evaluated from the leaves of the tree first back to the
+trunk. There can be static imports _within_ `my-app.mjs`, which
+will not be evaluated until `my-app.mjs` is dynamically
+imported.
+
+The `--experimental-loader` flag of the CLI can be used together
+with the `register` function; the loaders registered with the
+function will follow the same evaluation chain of loaders registered
+within the CLI:
+
+```console
+node \
+ --experimental-loader unpkg \
+ --experimental-loader http-to-https \
+ --experimental-loader cache-buster \
+ entrypoint.mjs
+```
+
+```mjs
+// entrypoint.mjs
+import { URL } from 'node:url';
+import { register } from 'node:module';
+
+const loaderURL = new URL('./my-programmatic-loader.mjs', import.meta.url);
+
+register(loaderURL);
+await import('./my-app.mjs');
+```
+
+The `my-programmatic-loader.mjs` can leverage `unpkg`,
+`http-to-https`, and `cache-buster` loaders.
+
+It's also possible to use `register` more than once:
+
+```mjs
+// entrypoint.mjs
+import { URL } from 'node:url';
+import { register } from 'node:module';
+
+register(new URL('./first-loader.mjs', import.meta.url));
+register('./second-loader.mjs', import.meta.url);
+await import('./my-app.mjs');
+```
+
+Both loaders (`first-loader.mjs` and `second-loader.mjs`) can use
+all the resources provided by the loaders registered in the CLI. But
+remember that they will only be available in the next imported
+module (`my-app.mjs`). The evaluation order of the hooks when
+importing `my-app.mjs` and consecutive modules in the example above
+will be:
+
+```console
+resolve: second-loader.mjs
+resolve: first-loader.mjs
+resolve: cache-buster
+resolve: http-to-https
+resolve: unpkg
+load: second-loader.mjs
+load: first-loader.mjs
+load: cache-buster
+load: http-to-https
+load: unpkg
+globalPreload: second-loader.mjs
+globalPreload: first-loader.mjs
+globalPreload: cache-buster
+globalPreload: http-to-https
+globalPreload: unpkg
+```
+
### `module.syncBuiltinESMExports()`
-> Stability: 1 - Experimental
-
```c
NAPI_EXTERN napi_status node_api_throw_syntax_error(napi_env env,
const char* code,
@@ -1341,10 +1345,9 @@ This API returns a JavaScript `RangeError` with the text provided.
added:
- v17.2.0
- v16.14.0
+napiVersion: 9
-->
-> Stability: 1 - Experimental
-
```c
NAPI_EXTERN napi_status node_api_create_syntax_error(napi_env env,
napi_value code,
@@ -2592,10 +2595,9 @@ of the ECMAScript Language Specification.
added:
- v17.5.0
- v16.15.0
+napiVersion: 9
-->
-> Stability: 1 - Experimental
-
```c
napi_status node_api_symbol_for(napi_env env,
const char* utf8description,
@@ -2889,6 +2891,56 @@ string. The native string is copied.
The JavaScript `string` type is described in
[Section 6.1.4][] of the ECMAScript Language Specification.
+#### `node_api_create_external_string_latin1`
+
+
+
+> Stability: 1 - Experimental
+
+```c
+napi_status
+node_api_create_external_string_latin1(napi_env env,
+ char* str,
+ size_t length,
+ napi_finalize finalize_callback,
+ void* finalize_hint,
+ napi_value* result,
+ bool* copied);
+```
+
+* `[in] env`: The environment that the API is invoked under.
+* `[in] str`: Character buffer representing an ISO-8859-1-encoded string.
+* `[in] length`: The length of the string in bytes, or `NAPI_AUTO_LENGTH` if it
+ is null-terminated.
+* `[in] finalize_callback`: The function to call when the string is being
+ collected. The function will be called with the following parameters:
+ * `[in] env`: The environment in which the add-on is running. This value
+ may be null if the string is being collected as part of the termination
+ of the worker or the main Node.js instance.
+ * `[in] data`: This is the value `str` as a `void*` pointer.
+ * `[in] finalize_hint`: This is the value `finalize_hint` that was given
+ to the API.
+ [`napi_finalize`][] provides more details.
+ This parameter is optional. Passing a null value means that the add-on
+ doesn't need to be notified when the corresponding JavaScript string is
+ collected.
+* `[in] finalize_hint`: Optional hint to pass to the finalize callback during
+ collection.
+* `[out] result`: A `napi_value` representing a JavaScript `string`.
+* `[out] copied`: Whether the string was copied. If it was, the finalizer will
+ already have been invoked to destroy `str`.
+
+Returns `napi_ok` if the API succeeded.
+
+This API creates a JavaScript `string` value from an ISO-8859-1-encoded C
+string. The native string may not be copied and must thus exist for the entire
+life cycle of the JavaScript value.
+
+The JavaScript `string` type is described in
+[Section 6.1.4][] of the ECMAScript Language Specification.
+
#### `napi_create_string_utf16`
+
+> Stability: 1 - Experimental
+
+```c
+napi_status
+node_api_create_external_string_utf16(napi_env env,
+ char16_t* str,
+ size_t length,
+ napi_finalize finalize_callback,
+ void* finalize_hint,
+ napi_value* result,
+ bool* copied);
+```
+
+* `[in] env`: The environment that the API is invoked under.
+* `[in] str`: Character buffer representing a UTF16-LE-encoded string.
+* `[in] length`: The length of the string in two-byte code units, or
+ `NAPI_AUTO_LENGTH` if it is null-terminated.
+* `[in] finalize_callback`: The function to call when the string is being
+ collected. The function will be called with the following parameters:
+ * `[in] env`: The environment in which the add-on is running. This value
+ may be null if the string is being collected as part of the termination
+ of the worker or the main Node.js instance.
+ * `[in] data`: This is the value `str` as a `void*` pointer.
+ * `[in] finalize_hint`: This is the value `finalize_hint` that was given
+ to the API.
+ [`napi_finalize`][] provides more details.
+ This parameter is optional. Passing a null value means that the add-on
+ doesn't need to be notified when the corresponding JavaScript string is
+ collected.
+* `[in] finalize_hint`: Optional hint to pass to the finalize callback during
+ collection.
+* `[out] result`: A `napi_value` representing a JavaScript `string`.
+* `[out] copied`: Whether the string was copied. If it was, the finalizer will
+ already have been invoked to destroy `str`.
+
+Returns `napi_ok` if the API succeeded.
+
+This API creates a JavaScript `string` value from a UTF16-LE-encoded C string.
+The native string may not be copied and must thus exist for the entire life
+cycle of the JavaScript value.
+
+The JavaScript `string` type is described in
+[Section 6.1.4][] of the ECMAScript Language Specification.
+
#### `napi_create_string_utf8`
-> Stability: 1 - Experimental
-
```c
NAPI_EXTERN napi_status
node_api_get_module_file_name(napi_env env, const char** result);
@@ -6475,6 +6581,8 @@ the add-on's file name during loading.
[`napi_wrap`]: #napi_wrap
[`node-addon-api`]: https://github.com/nodejs/node-addon-api
[`node_api.h`]: https://github.com/nodejs/node/blob/HEAD/src/node_api.h
+[`node_api_create_external_string_latin1`]: #node_api_create_external_string_latin1
+[`node_api_create_external_string_utf16`]: #node_api_create_external_string_utf16
[`node_api_create_syntax_error`]: #node_api_create_syntax_error
[`node_api_throw_syntax_error`]: #node_api_throw_syntax_error
[`process.release`]: process.md#processrelease
diff --git a/doc/api/stream.md b/doc/api/stream.md
index 21d6e33872f758..46cbf7965aebc1 100644
--- a/doc/api/stream.md
+++ b/doc/api/stream.md
@@ -2447,37 +2447,6 @@ import { Readable } from 'node:stream';
await Readable.from([1, 2, 3, 4]).take(2).toArray(); // [1, 2]
```
-##### `readable.asIndexedPairs([options])`
-
-
-
-> Stability: 1 - Experimental
-
-* `options` {Object}
- * `signal` {AbortSignal} allows destroying the stream if the signal is
- aborted.
-* Returns: {Readable} a stream of indexed pairs.
-
-This method returns a new stream with chunks of the underlying stream paired
-with a counter in the form `[index, chunk]`. The first index value is 0 and it
-increases by 1 for each chunk produced.
-
-```mjs
-import { Readable } from 'node:stream';
-
-const pairs = await Readable.from(['a', 'b', 'c']).asIndexedPairs().toArray();
-console.log(pairs); // [[0, 'a'], [1, 'b'], [2, 'c']]
-```
-
##### `readable.reduce(fn[, initial[, options]])`
-`;
- const ruleReadmePath = `./.README/rules/${ruleName}.md`;
- if (!(0, _fs.existsSync)(ruleReadmePath)) {
- await _promises.default.writeFile(ruleReadmePath, ruleReadmeTemplate);
- }
-
- /**
- * @param {object} cfg
- * @param {string} cfg.path
- * @param {RegExp} cfg.oldRegex
- * @param {string} cfg.checkName
- * @param {string} cfg.newLine
- * @param {boolean} [cfg.oldIsCamel]
- * @returns {Promise}
- */
- const replaceInOrder = async ({
- path,
- oldRegex,
- checkName,
- newLine,
- oldIsCamel
- }) => {
- /**
- * @typedef {number} Integer
- */
- /**
- * @typedef {{
- * matchedLine: string,
- * offset: Integer,
- * oldRule: string,
- * }} OffsetInfo
- */
- /**
- * @type {OffsetInfo[]}
- */
- const offsets = [];
- let readme = await _promises.default.readFile(path, 'utf8');
- readme.replace(oldRegex,
- /**
- * @param {string} matchedLine
- * @param {string} n1
- * @param {Integer} offset
- * @param {string} str
- * @param {object} groups
- * @param {string} groups.oldRule
- * @returns {string}
- */
- (matchedLine, n1, offset, str, {
- oldRule
- }) => {
- offsets.push({
- matchedLine,
- offset,
- oldRule
- });
- return matchedLine;
- });
- offsets.sort(({
- oldRule
- }, {
- oldRule: oldRuleB
- }) => {
- return oldRule < oldRuleB ? -1 : oldRule > oldRuleB ? 1 : 0;
- });
- let alreadyIncluded = false;
- const itemIndex = offsets.findIndex(({
- oldRule
- }) => {
- alreadyIncluded ||= oldIsCamel ? camelCasedRuleName === oldRule : ruleName === oldRule;
- return oldIsCamel ? camelCasedRuleName < oldRule : ruleName < oldRule;
- });
- let item = itemIndex !== undefined && offsets[itemIndex];
- if (item && itemIndex === 0 &&
- // This property would not always be sufficient but in this case it is.
- oldIsCamel) {
- item.offset = 0;
- }
- if (!item) {
- item = /** @type {OffsetInfo} */offsets.pop();
- item.offset += item.matchedLine.length;
- }
- if (alreadyIncluded) {
- console.log(`Rule name is already present in ${checkName}.`);
- } else {
- readme = readme.slice(0, item.offset) + (item.offset ? '\n' : '') + newLine + (item.offset ? '' : '\n') + readme.slice(item.offset);
- await _promises.default.writeFile(path, readme);
- }
- };
-
- // await replaceInOrder({
- // checkName: 'README',
- // newLine: `{"gitdown": "include", "file": "./rules/${ruleName}.md"}`,
- // oldRegex: /\n\{"gitdown": "include", "file": ".\/rules\/(?[^.]*).md"\}/gu,
- // path: './.README/README.md',
- // });
-
- await replaceInOrder({
- checkName: 'index import',
- newLine: `import ${camelCasedRuleName} from './rules/${camelCasedRuleName}';`,
- oldIsCamel: true,
- oldRegex: /\nimport (?[^ ]*) from '.\/rules\/\1';/gu,
- path: './src/index.js'
- });
- await replaceInOrder({
- checkName: 'index recommended',
- newLine: `${' '.repeat(6)}'jsdoc/${ruleName}': ${recommended ? 'warnOrError' : '\'off\''},`,
- oldRegex: /\n\s{6}'jsdoc\/(?[^']*)': .*?,/gu,
- path: './src/index.js'
- });
- await replaceInOrder({
- checkName: 'index rules',
- newLine: `${' '.repeat(4)}'${ruleName}': ${camelCasedRuleName},`,
- oldRegex: /\n\s{4}'(?[^']*)': [^,]*,/gu,
- path: './src/index.js'
- });
- await Promise.resolve().then(() => _interopRequireWildcard(require('./generateDocs.js')));
-
- /*
- console.log('Paths to open for further editing\n');
- console.log(`open ${ruleReadmePath}`);
- console.log(`open ${rulePath}`);
- console.log(`open ${ruleTestPath}\n`);
- */
-
- // Set chdir as somehow still in operation from other test
- process.chdir((0, _path.resolve)(__dirname, '../../'));
- await (0, _openEditor.default)([
- // Could even add editor line column numbers like `${rulePath}:1:1`
- ruleReadmePath, ruleTestPath, rulePath]);
-})();
-//# sourceMappingURL=generateRule.js.map
\ No newline at end of file
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/exportParser.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/exportParser.js
index 55d4e8a6beaa8d..85bb4b430ea706 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/exportParser.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/exportParser.js
@@ -10,9 +10,27 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
const debug = (0, _debug.default)('requireExportJsdoc');
/**
- * @returns {{
- * props: object
- * }}
+ * @typedef {{
+ * value: string
+ * }} ValueObject
+ */
+
+/**
+ * @typedef {{
+ * type?: string,
+ * value?: ValueObject|import('eslint').Rule.Node,
+ * props: {
+ * [key: string]: CreatedNode|null,
+ * },
+ * special?: true,
+ * globalVars?: CreatedNode,
+ * exported?: boolean,
+ * ANONYMOUS_DEFAULT?: import('eslint').Rule.Node
+ * }} CreatedNode
+ */
+
+/**
+ * @returns {CreatedNode}
*/
const createNode = function () {
return {
@@ -21,19 +39,19 @@ const createNode = function () {
};
/**
- * @param {} symbol
- * @returns {null}
+ * @param {CreatedNode|null} symbol
+ * @returns {string|null}
*/
const getSymbolValue = function (symbol) {
- /* istanbul ignore next */
+ /* istanbul ignore if */
if (!symbol) {
- /* istanbul ignore next */
return null;
}
- /* istanbul ignore next */
+ /* istanbul ignore else */
if (symbol.type === 'literal') {
- return symbol.value.value;
+ return (/** @type {ValueObject} */symbol.value.value
+ );
}
/* istanbul ignore next */
@@ -42,11 +60,11 @@ const getSymbolValue = function (symbol) {
/**
*
- * @param {} node
- * @param {} globals
- * @param {} scope
- * @param {} opts
- * @returns {}
+ * @param {import('estree').Identifier} node
+ * @param {CreatedNode} globals
+ * @param {CreatedNode} scope
+ * @param {SymbolOptions} opts
+ * @returns {CreatedNode|null}
*/
const getIdentifier = function (node, globals, scope, opts) {
if (opts.simpleIdentifier) {
@@ -74,17 +92,35 @@ const getIdentifier = function (node, globals, scope, opts) {
}
return null;
};
-let createSymbol = null;
+
+/**
+ * @callback CreateSymbol
+ * @param {import('eslint').Rule.Node|null} node
+ * @param {CreatedNode} globals
+ * @param {import('eslint').Rule.Node|null} value
+ * @param {CreatedNode} [scope]
+ * @param {boolean|SymbolOptions} [isGlobal]
+ * @returns {CreatedNode|null}
+ */
+
+/** @type {CreateSymbol} */
+let createSymbol; // eslint-disable-line prefer-const
/* eslint-disable complexity -- Temporary */
+/**
+ * @typedef {{
+ * simpleIdentifier?: boolean
+ * }} SymbolOptions
+ */
+
/**
*
- * @param {} node
- * @param {} globals
- * @param {} scope
- * @param {} opt
- * @returns {}
+ * @param {import('eslint').Rule.Node} node
+ * @param {CreatedNode} globals
+ * @param {CreatedNode} scope
+ * @param {SymbolOptions} [opt]
+ * @returns {CreatedNode|null}
*/
const getSymbol = function (node, globals, scope, opt) {
/* eslint-enable complexity -- Temporary */
@@ -98,13 +134,15 @@ const getSymbol = function (node, globals, scope, opt) {
}
case 'MemberExpression':
{
- const obj = getSymbol(node.object, globals, scope, opts);
- const propertySymbol = getSymbol(node.property, globals, scope, {
+ const obj = getSymbol( /** @type {import('eslint').Rule.Node} */
+ node.object, globals, scope, opts);
+ const propertySymbol = getSymbol( /** @type {import('eslint').Rule.Node} */
+ node.property, globals, scope, {
simpleIdentifier: !node.computed
});
const propertyValue = getSymbolValue(propertySymbol);
- /* istanbul ignore next */
+ /* istanbul ignore else */
if (obj && propertyValue && obj.props[propertyValue]) {
const block = obj.props[propertyValue];
return block;
@@ -117,16 +155,22 @@ const getSymbol = function (node, globals, scope, opt) {
}
*/
/* istanbul ignore next */
- debug(`MemberExpression: Missing property ${node.property.name}`);
+ debug(`MemberExpression: Missing property ${
+ /** @type {import('estree').PrivateIdentifier} */node.property.name}`);
/* istanbul ignore next */
return null;
}
case 'ClassExpression':
{
- return getSymbol(node.body, globals, scope, opts);
+ return getSymbol( /** @type {import('eslint').Rule.Node} */
+ node.body, globals, scope, opts);
}
+
+ // @ts-expect-error TS OK
case 'TSTypeAliasDeclaration':
+ // @ts-expect-error TS OK
+ // Fallthrough
case 'TSEnumDeclaration':
case 'TSInterfaceDeclaration':
case 'ClassDeclaration':
@@ -143,15 +187,20 @@ const getSymbol = function (node, globals, scope, opt) {
}
case 'AssignmentExpression':
{
- return createSymbol(node.left, globals, node.right, scope, opts);
+ return createSymbol( /** @type {import('eslint').Rule.Node} */
+ node.left, globals, /** @type {import('eslint').Rule.Node} */
+ node.right, scope, opts);
}
case 'ClassBody':
{
const val = createNode();
for (const method of node.body) {
- val.props[method.key.name] = createNode();
- val.props[method.key.name].type = 'object';
- val.props[method.key.name].value = method.value;
+ val.props[/** @type {import('estree').Identifier} */ /** @type {import('estree').MethodDefinition} */method.key.name] = createNode();
+ /** @type {{[key: string]: CreatedNode}} */
+ val.props[/** @type {import('estree').Identifier} */ /** @type {import('estree').MethodDefinition} */method.key.name].type = 'object';
+ /** @type {{[key: string]: CreatedNode}} */
+ val.props[/** @type {import('estree').Identifier} */ /** @type {import('estree').MethodDefinition} */method.key.name].value = /** @type {import('eslint').Rule.Node} */
+ /** @type {import('estree').MethodDefinition} */method.value;
}
val.type = 'object';
val.value = node.parent;
@@ -169,10 +218,13 @@ const getSymbol = function (node, globals, scope, opt) {
'ExperimentalSpreadProperty'].includes(prop.type)) {
continue;
}
- const propVal = getSymbol(prop.value, globals, scope, opts);
- /* istanbul ignore next */
+ const propVal = getSymbol( /** @type {import('eslint').Rule.Node} */
+ /** @type {import('estree').Property} */
+ prop.value, globals, scope, opts);
+ /* istanbul ignore if */
if (propVal) {
- val.props[prop.key.name] = propVal;
+ val.props[/** @type {import('estree').PrivateIdentifier} */
+ /** @type {import('estree').Property} */prop.key.name] = propVal;
}
}
return val;
@@ -192,11 +244,11 @@ const getSymbol = function (node, globals, scope, opt) {
/**
*
- * @param {} block
- * @param {} name
- * @param {} value
- * @param {} globals
- * @param {} isGlobal
+ * @param {CreatedNode} block
+ * @param {string} name
+ * @param {CreatedNode|null} value
+ * @param {CreatedNode} globals
+ * @param {boolean|SymbolOptions|undefined} isGlobal
* @returns {void}
*/
const createBlockSymbol = function (block, name, value, globals, isGlobal) {
@@ -205,34 +257,31 @@ const createBlockSymbol = function (block, name, value, globals, isGlobal) {
globals.props.window.props[name] = value;
}
};
-
-/**
- *
- * @param {} node
- * @param {} globals
- * @param {} value
- * @param {} scope
- * @param {} isGlobal
- * @returns {null}
- */
createSymbol = function (node, globals, value, scope, isGlobal) {
const block = scope || globals;
+ /* istanbul ignore if */
+ if (!node) {
+ return null;
+ }
let symbol;
// eslint-disable-next-line default-case
switch (node.type) {
case 'FunctionDeclaration':
/* istanbul ignore next */
+ // @ts-expect-error TS OK
// Fall through
case 'TSEnumDeclaration':
case 'TSInterfaceDeclaration':
/* istanbul ignore next */
+ // @ts-expect-error TS OK
// Fall through
case 'TSTypeAliasDeclaration':
case 'ClassDeclaration':
{
- /* istanbul ignore next */
- if (node.id && node.id.type === 'Identifier') {
- return createSymbol(node.id, globals, node, globals);
+ const nde = /** @type {import('estree').ClassDeclaration} */node;
+ /* istanbul ignore else */
+ if (nde.id && nde.id.type === 'Identifier') {
+ return createSymbol( /** @type {import('eslint').Rule.Node} */nde.id, globals, node, globals);
}
/* istanbul ignore next */
@@ -240,19 +289,20 @@ createSymbol = function (node, globals, value, scope, isGlobal) {
}
case 'Identifier':
{
+ const nde = /** @type {import('estree').Identifier} */node;
if (value) {
const valueSymbol = getSymbol(value, globals, block);
- /* istanbul ignore next */
+ /* istanbul ignore else */
if (valueSymbol) {
- createBlockSymbol(block, node.name, valueSymbol, globals, isGlobal);
- return block.props[node.name];
+ createBlockSymbol(block, nde.name, valueSymbol, globals, isGlobal);
+ return block.props[nde.name];
}
/* istanbul ignore next */
- debug('Identifier: Missing value symbol for %s', node.name);
+ debug('Identifier: Missing value symbol for %s', nde.name);
} else {
- createBlockSymbol(block, node.name, createNode(), globals, isGlobal);
- return block.props[node.name];
+ createBlockSymbol(block, nde.name, createNode(), globals, isGlobal);
+ return block.props[nde.name];
}
/* istanbul ignore next */
@@ -260,18 +310,18 @@ createSymbol = function (node, globals, value, scope, isGlobal) {
}
case 'MemberExpression':
{
- symbol = getSymbol(node.object, globals, block);
- const propertySymbol = getSymbol(node.property, globals, block, {
- simpleIdentifier: !node.computed
+ const nde = /** @type {import('estree').MemberExpression} */node;
+ symbol = getSymbol( /** @type {import('eslint').Rule.Node} */nde.object, globals, block);
+ const propertySymbol = getSymbol( /** @type {import('eslint').Rule.Node} */nde.property, globals, block, {
+ simpleIdentifier: !nde.computed
});
const propertyValue = getSymbolValue(propertySymbol);
if (symbol && propertyValue) {
- createBlockSymbol(symbol, propertyValue, getSymbol(value, globals, block), globals, isGlobal);
+ createBlockSymbol(symbol, propertyValue, getSymbol( /** @type {import('eslint').Rule.Node} */
+ value, globals, block), globals, isGlobal);
return symbol.props[propertyValue];
}
-
- /* istanbul ignore next */
- debug('MemberExpression: Missing symbol: %s', node.property.name);
+ debug('MemberExpression: Missing symbol: %s', /** @type {import('estree').Identifier} */nde.property.name);
break;
}
}
@@ -280,11 +330,10 @@ createSymbol = function (node, globals, value, scope, isGlobal) {
/**
* Creates variables from variable definitions
- *
- * @param {} node
- * @param {} globals
- * @param {} opts
- * @returns {}
+ * @param {import('eslint').Rule.Node} node
+ * @param {CreatedNode} globals
+ * @param {import('./rules/requireJsdoc.js').RequireJsdocOpts} opts
+ * @returns {void}
*/
const initVariables = function (node, globals, opts) {
// eslint-disable-next-line default-case
@@ -292,23 +341,27 @@ const initVariables = function (node, globals, opts) {
case 'Program':
{
for (const childNode of node.body) {
- initVariables(childNode, globals, opts);
+ initVariables( /** @type {import('eslint').Rule.Node} */
+ childNode, globals, opts);
}
break;
}
case 'ExpressionStatement':
{
- initVariables(node.expression, globals, opts);
+ initVariables( /** @type {import('eslint').Rule.Node} */
+ node.expression, globals, opts);
break;
}
case 'VariableDeclaration':
{
for (const declaration of node.declarations) {
// let and const
- const symbol = createSymbol(declaration.id, globals, null, globals);
+ const symbol = createSymbol( /** @type {import('eslint').Rule.Node} */
+ declaration.id, globals, null, globals);
if (opts.initWindow && node.kind === 'var' && globals.props.window) {
// If var, also add to window
- globals.props.window.props[declaration.id.name] = symbol;
+ globals.props.window.props[/** @type {import('estree').Identifier} */
+ declaration.id.name] = symbol;
}
}
break;
@@ -316,7 +369,8 @@ const initVariables = function (node, globals, opts) {
case 'ExportNamedDeclaration':
{
if (node.declaration) {
- initVariables(node.declaration, globals, opts);
+ initVariables( /** @type {import('eslint').Rule.Node} */
+ node.declaration, globals, opts);
}
break;
}
@@ -327,11 +381,10 @@ const initVariables = function (node, globals, opts) {
/**
* Populates variable maps using AST
- *
- * @param {} node
- * @param {} globals
- * @param {} opt
- * @param {} isExport
+ * @param {import('eslint').Rule.Node} node
+ * @param {CreatedNode} globals
+ * @param {import('./rules/requireJsdoc.js').RequireJsdocOpts} opt
+ * @param {true} [isExport]
* @returns {boolean}
*/
const mapVariables = function (node, globals, opt, isExport) {
@@ -346,25 +399,31 @@ const mapVariables = function (node, globals, opt, isExport) {
return false;
}
for (const childNode of node.body) {
- mapVariables(childNode, globals, opts);
+ mapVariables( /** @type {import('eslint').Rule.Node} */
+ childNode, globals, opts);
}
break;
}
case 'ExpressionStatement':
{
- mapVariables(node.expression, globals, opts);
+ mapVariables( /** @type {import('eslint').Rule.Node} */
+ node.expression, globals, opts);
break;
}
case 'AssignmentExpression':
{
- createSymbol(node.left, globals, node.right);
+ createSymbol( /** @type {import('eslint').Rule.Node} */
+ node.left, globals, /** @type {import('eslint').Rule.Node} */
+ node.right);
break;
}
case 'VariableDeclaration':
{
for (const declaration of node.declarations) {
- const isGlobal = opts.initWindow && node.kind === 'var' && globals.props.window;
- const symbol = createSymbol(declaration.id, globals, declaration.init, globals, isGlobal);
+ const isGlobal = Boolean(opts.initWindow && node.kind === 'var' && globals.props.window);
+ const symbol = createSymbol( /** @type {import('eslint').Rule.Node} */
+ declaration.id, globals, /** @type {import('eslint').Rule.Node} */
+ declaration.init, globals, isGlobal);
if (symbol && isExport) {
symbol.exported = true;
}
@@ -373,19 +432,24 @@ const mapVariables = function (node, globals, opt, isExport) {
}
case 'FunctionDeclaration':
{
- /* istanbul ignore next */
- if (node.id.type === 'Identifier') {
- createSymbol(node.id, globals, node, globals, true);
+ /* istanbul ignore if */
+ if ( /** @type {import('estree').Identifier} */node.id.type === 'Identifier') {
+ createSymbol( /** @type {import('eslint').Rule.Node} */
+ node.id, globals, node, globals, true);
}
break;
}
case 'ExportDefaultDeclaration':
{
- const symbol = createSymbol(node.declaration, globals, node.declaration);
+ const symbol = createSymbol( /** @type {import('eslint').Rule.Node} */
+ node.declaration, globals, /** @type {import('eslint').Rule.Node} */
+ node.declaration);
if (symbol) {
symbol.exported = true;
- } else if (!node.id) {
- globals.ANONYMOUS_DEFAULT = node.declaration;
+ } else {
+ // if (!node.id) {
+ globals.ANONYMOUS_DEFAULT = /** @type {import('eslint').Rule.Node} */
+ node.declaration;
}
break;
}
@@ -393,24 +457,29 @@ const mapVariables = function (node, globals, opt, isExport) {
{
if (node.declaration) {
if (node.declaration.type === 'VariableDeclaration') {
- mapVariables(node.declaration, globals, opts, true);
+ mapVariables( /** @type {import('eslint').Rule.Node} */
+ node.declaration, globals, opts, true);
} else {
- const symbol = createSymbol(node.declaration, globals, node.declaration);
- /* istanbul ignore next */
+ const symbol = createSymbol( /** @type {import('eslint').Rule.Node} */
+ node.declaration, globals, /** @type {import('eslint').Rule.Node} */
+ node.declaration);
+ /* istanbul ignore if */
if (symbol) {
symbol.exported = true;
}
}
}
for (const specifier of node.specifiers) {
- mapVariables(specifier, globals, opts);
+ mapVariables( /** @type {import('eslint').Rule.Node} */
+ specifier, globals, opts);
}
break;
}
case 'ExportSpecifier':
{
- const symbol = getSymbol(node.local, globals, globals);
- /* istanbul ignore next */
+ const symbol = getSymbol( /** @type {import('eslint').Rule.Node} */
+ node.local, globals, globals);
+ /* istanbul ignore if */
if (symbol) {
symbol.exported = true;
}
@@ -418,7 +487,7 @@ const mapVariables = function (node, globals, opt, isExport) {
}
case 'ClassDeclaration':
{
- createSymbol(node.id, globals, node.body, globals);
+ createSymbol( /** @type {import('eslint').Rule.Node|null} */node.id, globals, /** @type {import('eslint').Rule.Node} */node.body, globals);
break;
}
default:
@@ -432,25 +501,27 @@ const mapVariables = function (node, globals, opt, isExport) {
/**
*
- * @param {} node
- * @param {} block
- * @param {} cache
+ * @param {import('eslint').Rule.Node} node
+ * @param {CreatedNode|ValueObject|string|undefined|
+ * import('eslint').Rule.Node} block
+ * @param {(CreatedNode|ValueObject|string|
+ * import('eslint').Rule.Node)[]} [cache]
* @returns {boolean}
*/
const findNode = function (node, block, cache) {
let blockCache = cache || [];
- /* istanbul ignore next */
if (!block || blockCache.includes(block)) {
return false;
}
blockCache = blockCache.slice();
blockCache.push(block);
- if ((block.type === 'object' || block.type === 'MethodDefinition') && block.value === node) {
+ if (typeof block === 'object' && 'type' in block && (block.type === 'object' || block.type === 'MethodDefinition') && block.value === node) {
return true;
}
- const {
- props = block.body
- } = block;
+ if (typeof block !== 'object') {
+ return false;
+ }
+ const props = 'props' in block && block.props || 'body' in block && block.body;
for (const propval of Object.values(props || {})) {
if (Array.isArray(propval)) {
/* istanbul ignore if */
@@ -469,8 +540,8 @@ const exportTypes = new Set(['ExportNamedDeclaration', 'ExportDefaultDeclaration
const ignorableNestedTypes = new Set(['FunctionDeclaration', 'ArrowFunctionExpression', 'FunctionExpression']);
/**
- * @param {} nde
- * @returns {}
+ * @param {import('eslint').Rule.Node} nde
+ * @returns {import('eslint').Rule.Node|false}
*/
const getExportAncestor = function (nde) {
let node = nde;
@@ -493,8 +564,8 @@ const canBeExportedByAncestorType = new Set(['TSPropertySignature', 'TSMethodSig
const canExportChildrenType = new Set(['TSInterfaceBody', 'TSInterfaceDeclaration', 'TSTypeLiteral', 'TSTypeAliasDeclaration', 'ClassDeclaration', 'ClassBody', 'ClassDefinition', 'ClassExpression', 'Program']);
/**
- * @param {} nde
- * @returns {}
+ * @param {import('eslint').Rule.Node} nde
+ * @returns {false|import('eslint').Rule.Node}
*/
const isExportByAncestor = function (nde) {
if (!canBeExportedByAncestorType.has(nde.type)) {
@@ -515,13 +586,13 @@ const isExportByAncestor = function (nde) {
/**
*
- * @param {} block
- * @param {} node
- * @param {} cache
+ * @param {CreatedNode} block
+ * @param {import('eslint').Rule.Node} node
+ * @param {CreatedNode[]} [cache] Currently unused
* @returns {boolean}
*/
const findExportedNode = function (block, node, cache) {
- /* istanbul ignore next */
+ /* istanbul ignore if */
if (block === null) {
return false;
}
@@ -530,8 +601,9 @@ const findExportedNode = function (block, node, cache) {
props
} = block;
for (const propval of Object.values(props)) {
- blockCache.push(propval);
- if (propval.exported && (node === propval.value || findNode(node, propval.value))) {
+ const pval = /** @type {CreatedNode} */propval;
+ blockCache.push(pval);
+ if (pval.exported && (node === pval.value || findNode(node, pval.value))) {
return true;
}
@@ -544,9 +616,9 @@ const findExportedNode = function (block, node, cache) {
/**
*
- * @param {} node
- * @param {} globals
- * @param {} opt
+ * @param {import('eslint').Rule.Node} node
+ * @param {CreatedNode} globals
+ * @param {import('./rules/requireJsdoc.js').RequireJsdocOpts} opt
* @returns {boolean}
*/
const isNodeExported = function (node, globals, opt) {
@@ -566,9 +638,9 @@ const isNodeExported = function (node, globals, opt) {
/**
*
- * @param {} node
- * @param {} globalVars
- * @param {} opts
+ * @param {import('eslint').Rule.Node} node
+ * @param {CreatedNode} globalVars
+ * @param {import('./rules/requireJsdoc.js').RequireJsdocOpts} opts
* @returns {boolean}
*/
const parseRecursive = function (node, globalVars, opts) {
@@ -581,14 +653,10 @@ const parseRecursive = function (node, globalVars, opts) {
/**
*
- * @param {} ast
- * @param {} node
- * @param {} opt
- * @returns {{
- * globalVars: {
- * props: {};
- * };
- * }}
+ * @param {import('eslint').Rule.Node} ast
+ * @param {import('eslint').Rule.Node} node
+ * @param {import('./rules/requireJsdoc.js').RequireJsdocOpts} opt
+ * @returns {CreatedNode}
*/
const parse = function (ast, node, opt) {
/* istanbul ignore next */
@@ -615,16 +683,17 @@ const parse = function (ast, node, opt) {
mapVariables(ast, globalVars, opts);
}
return {
- globalVars
+ globalVars,
+ props: {}
};
};
/**
*
- * @param {} node
- * @param {} sourceCode
- * @param {} opt
- * @param {} settings
+ * @param {import('eslint').Rule.Node} node
+ * @param {import('eslint').SourceCode} sourceCode
+ * @param {import('./rules/requireJsdoc.js').RequireJsdocOpts} opt
+ * @param {import('./iterateJsdoc.js').Settings} settings
* @returns {boolean}
*/
const isUncommentedExport = function (node, sourceCode, opt, settings) {
@@ -645,8 +714,10 @@ const isUncommentedExport = function (node, sourceCode, opt, settings) {
return true;
}
}
- const parseResult = parse(sourceCode.ast, node, opt);
- return isNodeExported(node, parseResult.globalVars, opt);
+ const ast = /** @type {unknown} */sourceCode.ast;
+ const parseResult = parse( /** @type {import('eslint').Rule.Node} */
+ ast, node, opt);
+ return isNodeExported(node, /** @type {CreatedNode} */parseResult.globalVars, opt);
};
var _default = {
isUncommentedExport,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/generateRule.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/generateRule.js
index 881e0bbce2862d..81de2b1f9cf2e3 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/generateRule.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/generateRule.js
@@ -1,10 +1,10 @@
"use strict";
+var _camelcase = _interopRequireDefault(require("camelcase"));
var _fs = require("fs");
var _promises = _interopRequireDefault(require("fs/promises"));
-var _path = require("path");
-var _camelcase = _interopRequireDefault(require("camelcase"));
var _openEditor = _interopRequireDefault(require("open-editor"));
+var _path = require("path");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; } /* eslint-disable no-console -- CLI */ /**
@@ -28,7 +28,8 @@ const recommended = options.includes('--recommended');
return;
}
const ruleNamesPath = './test/rules/ruleNames.json';
- const ruleNames = JSON.parse(await _promises.default.readFile(ruleNamesPath, 'utf8'));
+ // @ts-expect-error Older types?
+ const ruleNames = JSON.parse(await _promises.default.readFile(ruleNamesPath));
if (!ruleNames.includes(ruleName)) {
ruleNames.push(ruleName);
ruleNames.sort();
@@ -47,7 +48,7 @@ export default iterateJsdoc(({
meta: {
docs: {
description: '',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-${ruleName}',
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/${ruleName}.md#repos-sticky-header',
},
schema: [
{
@@ -102,7 +103,13 @@ export default iterateJsdoc(({
|Settings||
|Options||
-
+## Failing examples
+
+
+
+## Passing examples
+
+
`;
const ruleReadmePath = `./.README/rules/${ruleName}.md`;
if (!(0, _fs.existsSync)(ruleReadmePath)) {
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/index.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/index.js
index f2dacd1c3d8833..b64ce541c71bd1 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/index.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/index.js
@@ -17,6 +17,7 @@ var _checkTypes = _interopRequireDefault(require("./rules/checkTypes"));
var _checkValues = _interopRequireDefault(require("./rules/checkValues"));
var _emptyTags = _interopRequireDefault(require("./rules/emptyTags"));
var _implementsOnClasses = _interopRequireDefault(require("./rules/implementsOnClasses"));
+var _importsAsDependencies = _interopRequireDefault(require("./rules/importsAsDependencies"));
var _informativeDocs = _interopRequireDefault(require("./rules/informativeDocs"));
var _matchDescription = _interopRequireDefault(require("./rules/matchDescription"));
var _matchName = _interopRequireDefault(require("./rules/matchName"));
@@ -76,6 +77,7 @@ const index = {
'check-values': _checkValues.default,
'empty-tags': _emptyTags.default,
'implements-on-classes': _implementsOnClasses.default,
+ 'imports-as-dependencies': _importsAsDependencies.default,
'informative-docs': _informativeDocs.default,
'match-description': _matchDescription.default,
'match-name': _matchName.default,
@@ -139,6 +141,7 @@ const createRecommendedRuleset = warnOrError => {
'jsdoc/check-values': warnOrError,
'jsdoc/empty-tags': warnOrError,
'jsdoc/implements-on-classes': warnOrError,
+ 'jsdoc/imports-as-dependencies': 'off',
'jsdoc/informative-docs': 'off',
'jsdoc/match-description': 'off',
'jsdoc/match-name': 'off',
@@ -146,7 +149,7 @@ const createRecommendedRuleset = warnOrError => {
'jsdoc/no-bad-blocks': 'off',
'jsdoc/no-blank-block-descriptions': 'off',
'jsdoc/no-blank-blocks': 'off',
- 'jsdoc/no-defaults': 'off',
+ 'jsdoc/no-defaults': warnOrError,
'jsdoc/no-missing-syntax': 'off',
'jsdoc/no-multi-asterisks': warnOrError,
'jsdoc/no-restricted-syntax': 'off',
@@ -197,6 +200,7 @@ const createRecommendedTypeScriptRuleset = warnOrError => {
typed: true
}],
'jsdoc/no-types': warnOrError,
+ 'jsdoc/no-undefined-types': 'off',
'jsdoc/require-param-type': 'off',
'jsdoc/require-property-type': 'off',
'jsdoc/require-returns-type': 'off'
@@ -205,6 +209,23 @@ const createRecommendedTypeScriptRuleset = warnOrError => {
};
};
+/**
+ * @param {"warn"|"error"} warnOrError
+ * @returns {import('eslint').ESLint.ConfigData}
+ */
+const createRecommendedTypeScriptFlavorRuleset = warnOrError => {
+ const ruleset = createRecommendedRuleset(warnOrError);
+ return {
+ ...ruleset,
+ rules: {
+ ...ruleset.rules,
+ /* eslint-disable indent -- Extra indent to avoid use by auto-rule-editing */
+ 'jsdoc/no-undefined-types': 'off'
+ /* eslint-enable indent */
+ }
+ };
+};
+
/* istanbul ignore if -- TS */
if (!index.configs) {
throw new Error('TypeScript guard');
@@ -213,6 +234,8 @@ index.configs.recommended = createRecommendedRuleset('warn');
index.configs['recommended-error'] = createRecommendedRuleset('error');
index.configs['recommended-typescript'] = createRecommendedTypeScriptRuleset('warn');
index.configs['recommended-typescript-error'] = createRecommendedTypeScriptRuleset('error');
+index.configs['recommended-typescript-flavor'] = createRecommendedTypeScriptFlavorRuleset('warn');
+index.configs['recommended-typescript-flavor-error'] = createRecommendedTypeScriptFlavorRuleset('error');
var _default = index;
exports.default = _default;
module.exports = exports.default;
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/iterateJsdoc.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/iterateJsdoc.js
index 4bb3d95efde89d..e5e60e95b9fc53 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/iterateJsdoc.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/iterateJsdoc.js
@@ -11,14 +11,19 @@ Object.defineProperty(exports, "parseComment", {
return _jsdoccomment.parseComment;
}
});
+var _jsdocUtils = _interopRequireDefault(require("./jsdocUtils"));
var _jsdoccomment = require("@es-joy/jsdoccomment");
var _commentParser = require("comment-parser");
-var _jsdocUtils = _interopRequireDefault(require("./jsdocUtils"));
+var _esquery = _interopRequireDefault(require("esquery"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
* @typedef {number} Integer
*/
+/**
+ * @typedef {import('@es-joy/jsdoccomment').JsdocBlockWithInline} JsdocBlockWithInline
+ */
+
/**
* @typedef {{
* disallowName?: string,
@@ -28,7 +33,8 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
* tags?: string[],
* replacement?: string,
* minimum?: Integer,
- * message?: string
+ * message?: string,
+ * forceRequireReturn?: boolean
* }} ContextObject
*/
/**
@@ -40,9 +46,10 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
* @param {{
* lastIndex?: Integer,
* isFunctionContext?: boolean,
- * selector?: string
+ * selector?: string,
+ * comment?: string
* }} info
- * @param {null|((jsdoc: import('comment-parser').Block) => boolean|undefined)} handler
+ * @param {null|((jsdoc: import('@es-joy/jsdoccomment').JsdocBlockWithInline) => boolean|undefined)} handler
* @param {import('eslint').Rule.Node} node
* @returns {void}
*/
@@ -51,9 +58,7 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
* @callback ForEachPreferredTag
* @param {string} tagName
* @param {(
- * matchingJsdocTag: import('comment-parser').Spec & {
- * line: Integer
- * },
+ * matchingJsdocTag: import('@es-joy/jsdoccomment').JsdocTagWithInline,
* targetTagName: string
* ) => void} arrayHandler
* @param {boolean} [skipReportingBlockedTag]
@@ -113,7 +118,6 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
* @returns {string}
*/
-/* eslint-disable jsdoc/valid-types -- Old version */
/**
* @callback ReportJSDoc
* @param {string} msg
@@ -157,7 +161,6 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
* }}
*/
-/* eslint-disable jsdoc/no-undefined-types -- Bug */
/**
* @callback SetBlockDescription
* @param {(
@@ -182,7 +185,6 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
* @returns {Integer}
*/
-/* eslint-disable jsdoc/no-undefined-types -- TS */
/**
* @callback ChangeTag
* @param {import('comment-parser').Spec} tag
@@ -191,7 +193,6 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
*/
/* eslint-enable jsdoc/no-undefined-types -- TS */
-/* eslint-disable jsdoc/no-undefined-types -- TS */
/**
* @callback SetTag
* @param {import('comment-parser').Spec & {
@@ -225,7 +226,6 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
* @returns {Integer|undefined}
*/
-/* eslint-disable jsdoc/no-undefined-types -- TS */
/**
* @typedef {(
* tokens?: Partial | undefined
@@ -235,13 +235,11 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
/**
* Sets tokens to empty string.
- *
* @callback EmptyTokens
* @param {import('comment-parser').Tokens} tokens
* @returns {void}
*/
-/* eslint-disable jsdoc/no-undefined-types -- TS */
/**
* @callback AddLine
* @param {Integer} sourceIndex
@@ -333,7 +331,7 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
/**
* @callback ComparePaths
* @param {string} name
- * @returns {(otherPathName: string) => void}
+ * @returns {(otherPathName: string) => boolean}
*/
/**
@@ -429,13 +427,13 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
/**
* @callback GetPresentTags
* @param {string[]} tagList
- * @returns {import('comment-parser').Spec[]}
+ * @returns {import('@es-joy/jsdoccomment').JsdocTagWithInline[]}
*/
/**
* @callback FilterTags
- * @param {(tag: import('comment-parser').Spec) => boolean} filter
- * @returns {import('comment-parser').Spec[]}
+ * @param {(tag: import('@es-joy/jsdoccomment').JsdocTagWithInline) => boolean} filter
+ * @returns {import('@es-joy/jsdoccomment').JsdocTagWithInline[]}
*/
/**
@@ -468,9 +466,7 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
/**
* @callback GetClassJsdoc
- * @returns {null|import('comment-parser').Block & {
- * inlineTags: import('@es-joy/jsdoccomment').InlineTag[]
- * }}
+ * @returns {null|JsdocBlockWithInline}
*/
/**
@@ -478,6 +474,16 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
* @param {string} tagName
* @returns {boolean}
*/
+
+/**
+ * @callback FindContext
+ * @param {Context[]} contexts
+ * @param {string|undefined} comment
+ * @returns {{
+ * foundContext: Context|undefined,
+ * contextStr: string
+ * }}
+ */
/**
* @typedef {BasicUtils & {
* isIteratingFunction: IsIteratingFunction,
@@ -537,7 +543,8 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
* hasOptionTag: HasOptionTag,
* getClassNode: GetClassNode,
* getClassJsdoc: GetClassJsdoc,
- * classHasTag: ClassHasTag
+ * classHasTag: ClassHasTag,
+ * findContext: FindContext
* }} Utils
*/
const {
@@ -549,14 +556,11 @@ const {
/**
* Should use ESLint rule's typing.
- *
* @typedef {import('eslint').Rule.RuleMetaData} EslintRuleMeta
*/
-/* eslint-disable jsdoc/valid-types -- Old version */
/**
* A plain object for tracking state as needed by rules across iterations.
- *
* @typedef {{
* globalTags: {},
* hasDuplicates: {
@@ -580,7 +584,6 @@ const {
/**
* The Node AST as supplied by the parser.
- *
* @typedef {import('eslint').Rule.Node} Node
*/
@@ -648,7 +651,6 @@ const getBasicUtils = (context, {
return utils;
};
-/* eslint-disable jsdoc/valid-types -- Old version of pratt */
/**
* @callback Report
* @param {string} message
@@ -666,9 +668,7 @@ const getBasicUtils = (context, {
/**
* @param {Node|null} node
- * @param {import('comment-parser').Block & {
- * inlineTags: import('@es-joy/jsdoccomment').InlineTag[]
- * }} jsdoc
+ * @param {JsdocBlockWithInline} jsdoc
* @param {import('eslint').AST.Token} jsdocNode
* @param {Settings} settings
* @param {Report} report
@@ -1496,17 +1496,30 @@ const getUtils = (node, jsdoc, jsdocNode, settings, report, context, iteratingAl
for (const matchingJsdocTag of matchingJsdocTags) {
arrayHandler(
/**
- * @type {import('comment-parser').Spec & {
- * line: Integer
- * }}
+ * @type {import('@es-joy/jsdoccomment').JsdocTagWithInline}
*/
matchingJsdocTag, targetTagName);
}
};
+
+ /** @type {FindContext} */
+ utils.findContext = (contexts, comment) => {
+ const foundContext = contexts.find(cntxt => {
+ return typeof cntxt === 'string' ? _esquery.default.matches( /** @type {Node} */node, _esquery.default.parse(cntxt), undefined, {
+ visitorKeys: sourceCode.visitorKeys
+ }) : (!cntxt.context || cntxt.context === 'any' || _esquery.default.matches( /** @type {Node} */node, _esquery.default.parse(cntxt.context), undefined, {
+ visitorKeys: sourceCode.visitorKeys
+ })) && comment === cntxt.comment;
+ });
+ const contextStr = typeof foundContext === 'object' ? foundContext.context ?? 'any' : String(foundContext);
+ return {
+ contextStr,
+ foundContext
+ };
+ };
return utils;
};
-/* eslint-disable jsdoc/valid-types -- Old version */
/**
* @typedef {{
* [key: string]: false|string|{
@@ -1527,7 +1540,6 @@ const getUtils = (node, jsdoc, jsdocNode, settings, report, context, iteratingAl
*/
/**
* Settings from ESLint types.
- *
* @typedef {{
* maxLines: Integer,
* minLines: Integer,
@@ -1599,7 +1611,6 @@ const getSettings = context => {
/**
* Create the report function
- *
* @callback MakeReport
* @param {import('eslint').Rule.RuleContext} context
* @param {import('estree').Node} commentNode
@@ -1648,7 +1659,6 @@ const makeReport = (context, commentNode) => {
return report;
};
-/* eslint-disable jsdoc/valid-types -- Old version */
/**
* @typedef {(
* arg: {
@@ -1661,7 +1671,7 @@ const makeReport = (context, commentNode) => {
* },
* state?: StateObject,
* globalState?: Map>,
- * jsdoc?: import('comment-parser').Block,
+ * jsdoc?: JsdocBlockWithInline,
* jsdocNode?: import('eslint').Rule.Node & {
* range: [number, number]
* },
@@ -1686,7 +1696,7 @@ const makeReport = (context, commentNode) => {
* },
* state: StateObject,
* globalState: Map>,
- * jsdoc: import('comment-parser').Block,
+ * jsdoc: JsdocBlockWithInline,
* jsdocNode: import('eslint').Rule.Node & {
* range: [number, number]
* },
@@ -1710,12 +1720,9 @@ const makeReport = (context, commentNode) => {
* isFunctionContext?: boolean,
* }} info
* @param {string} indent
- * @param {import('comment-parser').Block & {
- * inlineTags: import('@es-joy/jsdoccomment').InlineTag[]
- * }} jsdoc
+ * @param {JsdocBlockWithInline} jsdoc
* @param {RuleConfig} ruleConfig
* @param {import('eslint').Rule.RuleContext} context
- * @param {string[]} lines
* @param {import('@es-joy/jsdoccomment').Token} jsdocNode
* @param {Node|null} node
* @param {Settings} settings
@@ -1725,7 +1732,7 @@ const makeReport = (context, commentNode) => {
* @param {boolean} [iteratingAll]
* @returns {void}
*/
-const iterate = (info, indent, jsdoc, ruleConfig, context, lines, jsdocNode, node, settings, sourceCode, iterator, state, iteratingAll) => {
+const iterate = (info, indent, jsdoc, ruleConfig, context, jsdocNode, node, settings, sourceCode, iterator, state, iteratingAll) => {
const jsdocNde = /** @type {unknown} */jsdocNode;
const report = makeReport(context, /** @type {import('estree').Node} */
jsdocNde);
@@ -1769,9 +1776,7 @@ const iterate = (info, indent, jsdoc, ruleConfig, context, lines, jsdocNode, nod
/**
* @param {string[]} lines
* @param {import('estree').Comment} jsdocNode
- * @returns {[indent: string, jsdoc: import('comment-parser').Block & {
- * inlineTags: import('@es-joy/jsdoccomment').InlineTag[]
- * }]}
+ * @returns {[indent: string, jsdoc: JsdocBlockWithInline]}
*/
const getIndentAndJSDoc = function (lines, jsdocNode) {
const sourceLine = lines[/** @type {import('estree').SourceLocation} */
@@ -1816,7 +1821,6 @@ const getIndentAndJSDoc = function (lines, jsdocNode) {
/**
* Create an eslint rule that iterates over all JSDocs, regardless of whether
* they are attached to a function-like node.
- *
* @param {JsdocVisitor} iterator
* @param {RuleConfig} ruleConfig The rule's configuration
* @param {ContextObject[]|null} [contexts] The `contexts` containing relevant `comment` info.
@@ -1866,7 +1870,7 @@ const iterateAllJsdocs = (iterator, ruleConfig, contexts, additiveCommentContext
comment,
lastIndex: idx,
selector: node === null || node === void 0 ? void 0 : node.type
- }, indent, jsdoc, ruleConfig, context, lines, jsdocNode, /** @type {Node} */
+ }, indent, jsdoc, ruleConfig, context, jsdocNode, /** @type {Node} */
node, /** @type {Settings} */
settings, sourceCode, iterator, state, true);
}
@@ -1891,7 +1895,7 @@ const iterateAllJsdocs = (iterator, ruleConfig, contexts, additiveCommentContext
} : {
lastIndex,
selector: node === null || node === void 0 ? void 0 : node.type
- }, indent, jsdoc, ruleConfig, context, lines, jsdocNode, node, /** @type {Settings} */
+ }, indent, jsdoc, ruleConfig, context, jsdocNode, node, /** @type {Settings} */
settings, sourceCode, iterator, state, true);
}
const settngs = /** @type {Settings} */settings;
@@ -1959,7 +1963,6 @@ const iterateAllJsdocs = (iterator, ruleConfig, contexts, additiveCommentContext
/**
* Create an eslint rule that iterates over all JSDocs, regardless of whether
* they are attached to a function-like node.
- *
* @param {JsdocVisitorBasic} iterator
* @param {RuleConfig} ruleConfig
* @returns {import('eslint').Rule.RuleModule}
@@ -2015,7 +2018,6 @@ function iterateJsdoc(iterator, ruleConfig) {
return {
/**
* The entrypoint for the JSDoc rule.
- *
* @param {import('eslint').Rule.RuleContext} context
* a reference to the context which hold all important information
* like settings and the sourcecode to check.
@@ -2063,7 +2065,6 @@ function iterateJsdoc(iterator, ruleConfig) {
lines
} = sourceCode;
- /* eslint-disable jsdoc/no-undefined-types -- TS */
/** @type {Partial} */
const state = {};
/* eslint-enable jsdoc/no-undefined-types -- TS */
@@ -2083,7 +2084,7 @@ function iterateJsdoc(iterator, ruleConfig) {
handler && handler(jsdoc) === false) {
return;
}
- iterate(info, indent, jsdoc, ruleConfig, context, lines, jsdocNode, node, settings, sourceCode, iterator, /** @type {StateObject} */
+ iterate(info, indent, jsdoc, ruleConfig, context, jsdocNode, node, settings, sourceCode, iterator, /** @type {StateObject} */
state);
};
@@ -2101,7 +2102,7 @@ function iterateJsdoc(iterator, ruleConfig) {
if (typeof ruleConfig.exit === 'function') {
contextObject['Program:exit'] = () => {
const ste = /** @type {StateObject} */state;
- /* eslint-disable jsdoc/no-undefined-types -- Bug */
+
// @ts-expect-error `utils` not needed at this point
/** @type {Required} */
ruleConfig.exit({
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/jsdocUtils.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/jsdocUtils.js
index 01fea439648483..d0fb931865b654 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/jsdocUtils.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/jsdocUtils.js
@@ -4,11 +4,11 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
-var _jsdoccomment = require("@es-joy/jsdoccomment");
-var _WarnSettings = _interopRequireDefault(require("./WarnSettings"));
var _getDefaultTagStructureForMode = _interopRequireDefault(require("./getDefaultTagStructureForMode"));
var _tagNames = require("./tagNames");
var _hasReturnValue = require("./utils/hasReturnValue");
+var _WarnSettings = _interopRequireDefault(require("./WarnSettings"));
+var _jsdoccomment = require("@es-joy/jsdoccomment");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
* @typedef {number} Integer
@@ -33,45 +33,47 @@ const setTagStructure = mode => {
tagStructure = (0, _getDefaultTagStructureForMode.default)(mode);
};
-/**
- * @typedef {{
- * hasPropertyRest: boolean,
- * hasRestElement: boolean,
- * names: string[],
- * rests: boolean[],
- * }} FlattendRootInfo
- */
-
/**
* @typedef {undefined|string|{
+ * name: Integer,
+ * restElement: boolean
+ * }|{
* isRestProperty: boolean|undefined,
* name: string,
- * restElement: true
- * }|[undefined|string, FlattendRootInfo & {
- * annotationParamName?: string
+ * restElement: boolean
* }|{
- * name: Integer,
+ * name: string,
* restElement: boolean
- * }[]]} ParamNameInfo
+ * }} ParamCommon
+ */
+/**
+ * @typedef {ParamCommon|[string|undefined, (FlattendRootInfo & {
+ * annotationParamName?: string,
+ * })]|NestedParamInfo} ParamNameInfo
*/
/**
- * @typedef {undefined|string|{
- * isRestProperty: boolean,
- * restElement: boolean,
- * name: string
- * }|[string, {
+ * @typedef {{
* hasPropertyRest: boolean,
* hasRestElement: boolean,
* names: string[],
* rests: boolean[],
- * }]|[string, string[]]} ParamInfo
+ * }} FlattendRootInfo
+ */
+/**
+ * @typedef {[string, (string[]|ParamInfo[])]} NestedParamInfo
+ */
+/**
+ * @typedef {ParamCommon|
+ * [string|undefined, (FlattendRootInfo & {
+ * annotationParamName?: string
+ * })]|
+ * NestedParamInfo} ParamInfo
*/
/**
* Given a nested array of property names, reduce them to a single array,
- * appending the name of the root element along the way if present.
- *
+ * appending the name of the root element along the way if present.
* @callback FlattenRoots
* @param {ParamInfo[]} params
* @param {string} [root]
@@ -114,21 +116,21 @@ const flattenRoots = (params, root = '') => {
if (flattened.hasPropertyRest) {
hasPropertyRest = true;
}
- const inner = [root ? `${root}.${cur[0]}` : cur[0], ...flattened.names].filter(Boolean);
+ const inner = /** @type {string[]} */[root ? `${root}.${cur[0]}` : cur[0], ...flattened.names].filter(Boolean);
rests.push(false, ...flattened.rests);
return acc.concat(inner);
}
if (typeof cur === 'object') {
- if (cur.isRestProperty) {
+ if ('isRestProperty' in cur && cur.isRestProperty) {
hasPropertyRest = true;
rests.push(true);
} else {
rests.push(false);
}
- if (cur.restElement) {
+ if ('restElement' in cur && cur.restElement) {
hasRestElement = true;
}
- acc.push(root ? `${root}.${cur.name}` : cur.name);
+ acc.push(root ? `${root}.${String(cur.name)}` : String(cur.name));
} else if (typeof cur !== 'undefined') {
rests.push(false);
acc.push(root ? `${root}.${cur}` : cur);
@@ -181,10 +183,14 @@ const getFunctionParameterNames = (functionNode, checkDefaultObjects) => {
* import('estree').RestElement|import('estree').ArrayPattern|
* import('@typescript-eslint/types').TSESTree.TSParameterProperty|
* import('@typescript-eslint/types').TSESTree.Property|
- * import('@typescript-eslint/types').TSESTree.RestElement
+ * import('@typescript-eslint/types').TSESTree.RestElement|
+ * import('@typescript-eslint/types').TSESTree.Identifier|
+ * import('@typescript-eslint/types').TSESTree.ObjectPattern|
+ * import('@typescript-eslint/types').TSESTree.BindingName|
+ * import('@typescript-eslint/types').TSESTree.Parameter
* } param
* @param {boolean} [isProperty]
- * @returns {ParamNameInfo}
+ * @returns {ParamNameInfo|[string, ParamNameInfo[]]}
*/
const getParamName = (param, isProperty) => {
var _param$left2;
@@ -228,16 +234,22 @@ const getFunctionParameterNames = (functionNode, checkDefaultObjects) => {
// eslint-disable-next-line default-case
switch (param.value.type) {
case 'ArrayPattern':
- return [param.key.name, /** @type {import('estree').ArrayPattern} */param.value.elements.map((prop, idx) => {
- return {
- name: idx,
- restElement: (prop === null || prop === void 0 ? void 0 : prop.type) === 'RestElement'
- };
- })];
+ {
+ return [/** @type {import('estree').Identifier} */
+ param.key.name, /** @type {import('estree').ArrayPattern} */param.value.elements.map((prop, idx) => {
+ return {
+ name: idx,
+ restElement: (prop === null || prop === void 0 ? void 0 : prop.type) === 'RestElement'
+ };
+ })];
+ }
case 'ObjectPattern':
- return [param.key.name, /** @type {import('estree').ObjectPattern} */param.value.properties.map(prop => {
- return getParamName(prop, isProperty);
- })];
+ {
+ return [/** @type {import('estree').Identifier} */param.key.name, /** @type {import('estree').ObjectPattern} */param.value.properties.map(prop => {
+ return (/** @type {string|[string, string[]]} */getParamName(prop, isProperty)
+ );
+ })];
+ }
case 'AssignmentPattern':
{
// eslint-disable-next-line default-case
@@ -245,17 +257,21 @@ const getFunctionParameterNames = (functionNode, checkDefaultObjects) => {
case 'Identifier':
// Default parameter
if (checkDefaultObjects && param.value.right.type === 'ObjectExpression') {
- return [param.key.name, /** @type {import('estree').AssignmentPattern} */param.value.right.properties.map(prop => {
- return getParamName(prop, isProperty);
+ return [/** @type {import('estree').Identifier} */param.key.name, /** @type {import('estree').AssignmentPattern} */param.value.right.properties.map(prop => {
+ return (/** @type {string} */getParamName( /** @type {import('estree').Property} */
+ prop, isProperty)
+ );
})];
}
break;
case 'ObjectPattern':
- return [param.key.name, /** @type {import('estree').ObjectPattern} */param.value.left.properties.map(prop => {
+ return [/** @type {import('estree').Identifier} */
+ param.key.name, /** @type {import('estree').ObjectPattern} */param.value.left.properties.map(prop => {
return getParamName(prop, isProperty);
})];
case 'ArrayPattern':
- return [param.key.name, /** @type {import('estree').ArrayPattern} */param.value.left.elements.map((prop, idx) => {
+ return [/** @type {import('estree').Identifier} */
+ param.key.name, /** @type {import('estree').ArrayPattern} */param.value.left.elements.map((prop, idx) => {
return {
name: idx,
restElement: (prop === null || prop === void 0 ? void 0 : prop.type) === 'RestElement'
@@ -270,9 +286,10 @@ const getFunctionParameterNames = (functionNode, checkDefaultObjects) => {
// The key of an object could also be a string or number
case 'Literal':
- return param.key.raw ||
- // istanbul ignore next -- `raw` may not be present in all parsers
- param.key.value;
+ return (/** @type {string} */param.key.raw ||
+ // istanbul ignore next -- `raw` may not be present in all parsers
+ param.key.value
+ );
// case 'MemberExpression':
default:
@@ -283,9 +300,9 @@ const getFunctionParameterNames = (functionNode, checkDefaultObjects) => {
return undefined;
}
}
- if (param.type === 'ArrayPattern' || ((_param$left2 = param.left) === null || _param$left2 === void 0 ? void 0 : _param$left2.type) === 'ArrayPattern') {
+ if (param.type === 'ArrayPattern' || /** @type {import('estree').AssignmentPattern} */((_param$left2 = param.left) === null || _param$left2 === void 0 ? void 0 : _param$left2.type) === 'ArrayPattern') {
var _param$left3;
- const elements = /** @type {import('estree').ArrayPattern} */param.elements || ( /** @type {import('estree').ArrayPattern} */(_param$left3 = param.left) === null || _param$left3 === void 0 ? void 0 : _param$left3.elements);
+ const elements = /** @type {import('estree').ArrayPattern} */param.elements || ( /** @type {import('estree').ArrayPattern} */(_param$left3 = /** @type {import('estree').AssignmentPattern} */param.left) === null || _param$left3 === void 0 ? void 0 : _param$left3.elements);
const roots = elements.map((prop, idx) => {
return {
name: `"${idx}"`,
@@ -297,19 +314,20 @@ const getFunctionParameterNames = (functionNode, checkDefaultObjects) => {
if (['RestElement', 'ExperimentalRestProperty'].includes(param.type)) {
return {
isRestProperty: isProperty,
- name: param.argument.name,
+ name: /** @type {import('@typescript-eslint/types').TSESTree.Identifier} */ /** @type {import('@typescript-eslint/types').TSESTree.RestElement} */param.argument.name,
restElement: true
};
}
if (param.type === 'TSParameterProperty') {
- return getParamName(param.parameter, true);
+ return getParamName( /** @type {import('@typescript-eslint/types').TSESTree.Identifier} */
+ /** @type {import('@typescript-eslint/types').TSESTree.TSParameterProperty} */param.parameter, true);
}
throw new Error(`Unsupported function signature format: \`${param.type}\`.`);
};
if (!functionNode) {
return [];
}
- return (functionNode.params || ((_functionNode$value = functionNode.value) === null || _functionNode$value === void 0 ? void 0 : _functionNode$value.params) || []).map(param => {
+ return ( /** @type {import('@typescript-eslint/types').TSESTree.FunctionDeclaration} */functionNode.params || ( /** @type {import('@typescript-eslint/types').TSESTree.MethodDefinition} */(_functionNode$value = functionNode.value) === null || _functionNode$value === void 0 ? void 0 : _functionNode$value.params) || []).map(param => {
return getParamName(param);
});
};
@@ -320,13 +338,13 @@ const getFunctionParameterNames = (functionNode, checkDefaultObjects) => {
*/
const hasParams = functionNode => {
// Should also check `functionNode.value.params` if supporting `MethodDefinition`
- return functionNode.params.length;
+ return (/** @type {import('@typescript-eslint/types').TSESTree.FunctionDeclaration} */functionNode.params.length
+ );
};
/**
* Gets all names of the target type, including those that refer to a path, e.g.
* "@param foo; @param foo.bar".
- *
* @param {import('comment-parser').Block} jsdoc
* @param {string} targetTagName
* @returns {{
@@ -373,6 +391,10 @@ const getTagNamesForMode = (mode, context) => {
if (!modeWarnSettings.hasBeenWarned(context, 'mode')) {
context.report({
loc: {
+ end: {
+ column: 1,
+ line: 1
+ },
start: {
column: 1,
line: 1
@@ -444,9 +466,7 @@ const isValidTag = (context, mode, name, definedTags) => {
};
/**
- * @param {import('comment-parser').Block & {
- * inlineTags: import('@es-joy/jsdoccomment').InlineTag[]
- * }} jsdoc
+ * @param {import('./iterateJsdoc.js').JsdocBlockWithInline} jsdoc
* @param {string} targetTagName
* @returns {boolean}
*/
@@ -459,10 +479,7 @@ const hasTag = (jsdoc, targetTagName) => {
/**
* Get all tags, inline tags and inline tags in tags
- *
- * @param {import('comment-parser').Block & {
- * inlineTags: import('@es-joy/jsdoccomment').JsdocInlineTagNoType[]
- * }} jsdoc
+ * @param {import('./iterateJsdoc.js').JsdocBlockWithInline} jsdoc
* @returns {(import('comment-parser').Spec|
* import('@es-joy/jsdoccomment').JsdocInlineTagNoType)[]}
*/
@@ -495,7 +512,8 @@ const getAllTags = jsdoc => {
}
}
for (const inlineTag of tag.inlineTags) {
- let line;
+ /** @type {import('./iterateJsdoc.js').Integer} */
+ let line = 0;
for (const {
number,
tokens: {
@@ -521,7 +539,7 @@ const getAllTags = jsdoc => {
};
/**
- * @param {import('comment-parser').Block} jsdoc
+ * @param {import('./iterateJsdoc.js').JsdocBlockWithInline} jsdoc
* @param {string[]} targetTagNames
* @returns {boolean}
*/
@@ -533,7 +551,6 @@ const hasATag = (jsdoc, targetTagNames) => {
/**
* Checks if the JSDoc comment has an undefined type.
- *
* @param {import('comment-parser').Spec|null|undefined} tag
* the tag which should be checked.
* @param {ParserMode} mode
@@ -685,7 +702,7 @@ const tagMightHaveTypePosition = (tag, tagMap = tagStructure) => {
return true;
}
const tagStruct = ensureMap(tagMap, tag);
- const ret = tagStruct.get('typeAllowed');
+ const ret = /** @type {boolean|undefined} */tagStruct.get('typeAllowed');
return ret === undefined ? true : ret;
};
const namepathTypes = new Set(['namepath-defining', 'namepath-referencing']);
@@ -708,7 +725,8 @@ const tagMightHaveNamePosition = (tag, tagMap = tagStructure) => {
*/
const tagMightHaveNamepath = (tag, tagMap = tagStructure) => {
const tagStruct = ensureMap(tagMap, tag);
- return namepathTypes.has(tagStruct.get('namepathRole'));
+ const nampathRole = tagStruct.get('namepathRole');
+ return nampathRole !== false && namepathTypes.has( /** @type {string} */nampathRole);
};
/**
@@ -759,7 +777,7 @@ const tagMissingRequiredTypeOrNamepath = (tag, tagMap = tagStructure) => {
/* eslint-disable complexity -- Temporary */
/**
- * @param {ESTreeOrTypeScriptNode} node
+ * @param {ESTreeOrTypeScriptNode|null|undefined} node
* @param {boolean} [checkYieldReturnValue]
* @returns {boolean}
*/
@@ -805,7 +823,8 @@ const hasNonFunctionYield = (node, checkYieldReturnValue) => {
}
case 'TryStatement':
{
- return hasNonFunctionYield(node.block, checkYieldReturnValue) || hasNonFunctionYield(node.handler && node.handler.body, checkYieldReturnValue) || hasNonFunctionYield(node.finalizer, checkYieldReturnValue);
+ return hasNonFunctionYield(node.block, checkYieldReturnValue) || hasNonFunctionYield(node.handler && node.handler.body, checkYieldReturnValue) || hasNonFunctionYield( /** @type {import('@typescript-eslint/types').TSESTree.BlockStatement} */
+ node.finalizer, checkYieldReturnValue);
}
case 'SwitchStatement':
{
@@ -866,8 +885,11 @@ const hasNonFunctionYield = (node, checkYieldReturnValue) => {
// @ts-expect-error In Babel?
// istanbul ignore next -- In Babel?
case 'ObjectMethod':
+ // @ts-expect-error In Babel?
// istanbul ignore next -- In Babel?
- return node.computed && hasNonFunctionYield(node.key, checkYieldReturnValue) || node.arguments.some(nde => {
+ return node.computed && hasNonFunctionYield(node.key, checkYieldReturnValue) ||
+ // @ts-expect-error In Babel?
+ node.arguments.some(nde => {
return hasNonFunctionYield(nde, checkYieldReturnValue);
});
case 'SpreadElement':
@@ -898,7 +920,7 @@ const hasNonFunctionYield = (node, checkYieldReturnValue) => {
case 'YieldExpression':
{
if (checkYieldReturnValue) {
- if (node.parent.type === 'VariableDeclarator') {
+ if ( /** @type {import('eslint').Rule.Node} */node.parent.type === 'VariableDeclarator') {
return true;
}
return false;
@@ -919,18 +941,18 @@ const hasNonFunctionYield = (node, checkYieldReturnValue) => {
/**
* Checks if a node has a return statement. Void return does not count.
- *
* @param {ESTreeOrTypeScriptNode} node
* @param {boolean} [checkYieldReturnValue]
* @returns {boolean}
*/
const hasYieldValue = (node, checkYieldReturnValue) => {
- return node.generator && (node.expression || hasNonFunctionYield(node.body, checkYieldReturnValue));
+ return (/** @type {import('@typescript-eslint/types').TSESTree.FunctionDeclaration} */node.generator && ( /** @type {import('@typescript-eslint/types').TSESTree.FunctionDeclaration} */node.expression || hasNonFunctionYield( /** @type {import('@typescript-eslint/types').TSESTree.FunctionDeclaration} */
+ node.body, checkYieldReturnValue))
+ );
};
/**
* Checks if a node has a throws statement.
- *
* @param {ESTreeOrTypeScriptNode|null|undefined} node
* @param {boolean} [innerFunction]
* @returns {boolean}
@@ -1007,9 +1029,8 @@ const isInlineTag = (tag) => {
/**
* Parses GCC Generic/Template types
- *
- * @see {https://github.com/google/closure-compiler/wiki/Generic-Types}
- * @see {https://www.typescriptlang.org/docs/handbook/jsdoc-supported-types.html#template}
+ * @see {@link https://github.com/google/closure-compiler/wiki/Generic-Types}
+ * @see {@link https://www.typescriptlang.org/docs/handbook/jsdoc-supported-types.html#template}
* @param {import('comment-parser').Spec} tag
* @returns {string[]}
*/
@@ -1025,9 +1046,8 @@ const parseClosureTemplateTag = tag => {
/**
* Checks user option for `contexts` array, defaulting to
- * contexts designated by the rule. Returns an array of
- * ESTree AST types, indicating allowable contexts.
- *
+ * contexts designated by the rule. Returns an array of
+ * ESTree AST types, indicating allowable contexts.
* @param {import('eslint').Rule.RuleContext} context
* @param {DefaultContexts|undefined} defaultContexts
* @param {{
@@ -1044,14 +1064,17 @@ const enforcedContexts = (context, defaultContexts, settings) => {
/**
* @param {import('./iterateJsdoc.js').Context[]} contexts
* @param {import('./iterateJsdoc.js').CheckJsdoc} checkJsdoc
- * @param {Function} [handler]
+ * @param {import('@es-joy/jsdoccomment').CommentHandler} [handler]
* @returns {import('eslint').Rule.RuleListener}
*/
const getContextObject = (contexts, checkJsdoc, handler) => {
/** @type {import('eslint').Rule.RuleListener} */
const properties = {};
for (const [idx, prop] of contexts.entries()) {
+ /** @type {string} */
let property;
+
+ /** @type {(node: import('eslint').Rule.Node) => void} */
let value;
if (typeof prop === 'object') {
const selInfo = {
@@ -1059,13 +1082,18 @@ const getContextObject = (contexts, checkJsdoc, handler) => {
selector: prop.context
};
if (prop.comment) {
- property = prop.context;
+ property = /** @type {string} */prop.context;
value = checkJsdoc.bind(null, {
...selInfo,
comment: prop.comment
- }, handler.bind(null, prop.comment));
+ },
+ /**
+ * @type {(jsdoc: import('@es-joy/jsdoccomment').JsdocBlockWithInline) => boolean}
+ */
+ /** @type {import('@es-joy/jsdoccomment').CommentHandler} */
+ handler.bind(null, prop.comment));
} else {
- property = prop.context;
+ property = /** @type {string} */prop.context;
value = checkJsdoc.bind(null, selInfo, null);
}
} else {
@@ -1076,10 +1104,18 @@ const getContextObject = (contexts, checkJsdoc, handler) => {
property = prop;
value = checkJsdoc.bind(null, selInfo, null);
}
- const old = properties[property];
- properties[property] = old ? function (...args) {
- old(...args);
- value(...args);
+ const old =
+ /**
+ * @type {((node: import('eslint').Rule.Node) => void)}
+ */
+ properties[property];
+ properties[property] = old ?
+ /**
+ * @type {((node: import('eslint').Rule.Node) => void)}
+ */
+ function (node) {
+ old(node);
+ value(node);
} : value;
}
return properties;
@@ -1088,10 +1124,9 @@ const tagsWithNamesAndDescriptions = new Set(['param', 'arg', 'argument', 'prope
// These two are parsed by our custom parser as though having a `name`
'returns', 'return']);
-/* eslint-disable jsdoc/valid-types -- Old version */
/**
* @typedef {{
- * [key: string]: false|
+ * [key: string]: false|string|
* {message: string, replacement?: string}
* }} TagNamePreference
*/
@@ -1211,7 +1246,7 @@ const hasAccessorPair = node => {
};
/**
- * @param {import('comment-parser').Block} jsdoc
+ * @param {import('./iterateJsdoc.js').JsdocBlockWithInline} jsdoc
* @param {import('eslint').Rule.Node|null} node
* @param {import('eslint').Rule.RuleContext} context
* @param {import('json-schema').JSONSchema4} schema
@@ -1237,17 +1272,16 @@ const exemptSpeciaMethods = (jsdoc, node, context, schema) => {
* identifier or numeric literal) or single or double quoted, in either
* the `@param` or in source, we need to strip the quotes to give a fair
* comparison.
- *
* @param {string} str
* @returns {string}
*/
const dropPathSegmentQuotes = str => {
- return str.replace(/\.(['"])(.*)\1/gu, '.$2');
+ return str.replaceAll(/\.(['"])(.*)\1/gu, '.$2');
};
/**
* @param {string} name
- * @returns {(otherPathName: string) => void}
+ * @returns {(otherPathName: string) => boolean}
*/
const comparePaths = name => {
return otherPathName => {
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkAccess.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkAccess.js
index 3d504e45bc2fd8..aa8505aa41ada3 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkAccess.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkAccess.js
@@ -31,7 +31,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Checks that `@access` tags have a valid value.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-check-access'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/check-access.md#repos-sticky-header'
},
type: 'suggestion'
}
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkAlignment.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkAlignment.js
index 41a475f991b82f..01c3d2dd0f30e4 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkAlignment.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkAlignment.js
@@ -50,7 +50,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Reports invalid alignment of JSDoc block asterisks.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-check-alignment'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/check-alignment.md#repos-sticky-header'
},
fixable: 'code',
type: 'layout'
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkExamples.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkExamples.js
index d1575ba6b15589..3918796b635e88 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkExamples.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkExamples.js
@@ -4,9 +4,9 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
+var _iterateJsdoc = _interopRequireDefault(require("../iterateJsdoc"));
var _eslint = require("eslint");
var _semver = _interopRequireDefault(require("semver"));
-var _iterateJsdoc = _interopRequireDefault(require("../iterateJsdoc"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Todo: When replace `CLIEngine` with `ESLint` when feature set complete per https://github.com/eslint/eslint/issues/14745
// https://github.com/eslint/eslint/blob/master/docs/user-guide/migrating-to-7.0.0.md#-the-cliengine-class-has-been-deprecated
@@ -24,7 +24,7 @@ const hasCaptionRegex = /^\s*([\s\S]*?)<\/caption>/u;
* @returns {string}
*/
const escapeStringRegexp = str => {
- return str.replace(/[.*+?^${}()|[\]\\]/gu, '\\$&');
+ return str.replaceAll(/[.*+?^${}()|[\]\\]/gu, '\\$&');
};
/**
@@ -210,7 +210,7 @@ var _default = (0, _iterateJsdoc.default)(({
useEslintrc: checkEslintrc
};
const cliConfigStr = JSON.stringify(cliConfig);
- const src = paddedIndent ? string.replace(new RegExp(`(^|\n) {${paddedIndent}}(?!$)`, 'gu'), '\n') : string;
+ const src = paddedIndent ? string.replaceAll(new RegExp(`(^|\n) {${paddedIndent}}(?!$)`, 'gu'), '\n') : string;
// Programmatic ESLint API: https://eslint.org/docs/developer-guide/nodejs-api
const fileNameMapKey = filename ? 'a' + cliConfigStr + filename : 'b' + cliConfigStr + defaultFileName;
@@ -431,7 +431,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Ensures that (JavaScript) examples within JSDoc adhere to ESLint rules.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-check-examples'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/check-examples.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkIndentation.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkIndentation.js
index 81695f42866578..6f0af91d05f8d8 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkIndentation.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkIndentation.js
@@ -24,7 +24,7 @@ const maskExcludedContent = (str, excludeTags) => {
*/
const maskCodeBlocks = str => {
const regContent = /([ \t]+\*)[ \t]```[^\n]*?([\w|\W]*?\n)(?=[ \t]*\*(?:[ \t]*(?:```|@\w+\s)|\/))/gu;
- return str.replace(regContent, (_match, margin, code) => {
+ return str.replaceAll(regContent, (_match, margin, code) => {
return (margin + '\n').repeat(code.match(/\n/gu).length);
});
};
@@ -52,7 +52,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Reports invalid padding inside JSDoc blocks.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-check-indentation'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/check-indentation.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkLineAlignment.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkLineAlignment.js
index 0daea79c2a515d..682a58c0dd73b2 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkLineAlignment.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkLineAlignment.js
@@ -4,9 +4,9 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
-var _commentParser = require("comment-parser");
var _alignTransform = _interopRequireDefault(require("../alignTransform"));
var _iterateJsdoc = _interopRequireDefault(require("../iterateJsdoc"));
+var _commentParser = require("comment-parser");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const {
flow: commentFlow
@@ -83,7 +83,7 @@ const checkNotAlignedPerTag = (utils, tag, customSpacings) => {
};
const postHyphenSpacing = (customSpacings === null || customSpacings === void 0 ? void 0 : customSpacings.postHyphen) ?? 1;
const exactHyphenSpacing = new RegExp(`^\\s*-\\s{${postHyphenSpacing},${postHyphenSpacing}}(?!\\s)`, 'u');
- const hasNoHyphen = !/^\s*-(?!$)/u.test(tokens.description);
+ const hasNoHyphen = !/^\s*-(?!$)(?=\s)/u.test(tokens.description);
const hasExactHyphenSpacing = exactHyphenSpacing.test(tokens.description);
// If checking alignment on multiple lines, need to check other `source`
@@ -124,7 +124,7 @@ const checkNotAlignedPerTag = (utils, tag, customSpacings) => {
}
}
if (!hasExactHyphenSpacing) {
- const hyphenSpacing = /^\s*-\s*/u;
+ const hyphenSpacing = /^\s*-\s+/u;
tokens.description = tokens.description.replace(hyphenSpacing, '-' + ''.padStart(postHyphenSpacing, ' '));
}
utils.setTag(tag, tokens);
@@ -261,7 +261,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Reports invalid alignment of JSDoc block lines.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-check-line-alignment'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/check-line-alignment.md#repos-sticky-header'
},
fixable: 'whitespace',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkParamNames.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkParamNames.js
index 3908c0f7782fc9..e8b0188c628d9d 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkParamNames.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkParamNames.js
@@ -72,7 +72,12 @@ const validateParameterNames = (targetTagName, allowExtraTrailingParamDocs, chec
hasPropertyRest,
rests,
annotationParamName
- }] = functionParameterName;
+ }] =
+ /**
+ * @type {[string | undefined, import('../jsdocUtils.js').FlattendRootInfo & {
+ * annotationParamName?: string | undefined;
+ }]} */
+ functionParameterName;
if (annotationParamName !== undefined) {
const name = tag.name.trim();
if (name !== annotationParamName) {
@@ -90,6 +95,8 @@ const validateParameterNames = (targetTagName, allowExtraTrailingParamDocs, chec
return paramTag.type;
});
const missingProperties = [];
+
+ /** @type {string[]} */
const notCheckingNames = [];
for (const [idx, name] of expectedNames.entries()) {
if (notCheckingNames.some(notCheckingName => {
@@ -128,6 +135,7 @@ const validateParameterNames = (targetTagName, allowExtraTrailingParamDocs, chec
}
}
if (!hasPropertyRest || checkRestProperty) {
+ /** @type {[string, import('comment-parser').Spec][]} */
const extraProperties = [];
for (const [idx, name] of actualNames.entries()) {
const match = name.startsWith(tag.name.trim() + '.');
@@ -164,7 +172,12 @@ const validateParameterNames = (targetTagName, allowExtraTrailingParamDocs, chec
});
const expectedNames = functionParameterNames.map((item, idx) => {
var _item$;
- if (item !== null && item !== void 0 && (_item$ = item[1]) !== null && _item$ !== void 0 && _item$.names) {
+ if (
+ /**
+ * @type {[string|undefined, (import('../jsdocUtils.js').FlattendRootInfo & {
+ * annotationParamName?: string,
+ })]} */
+ item !== null && item !== void 0 && (_item$ = item[1]) !== null && _item$ !== void 0 && _item$.names) {
return actualNames[idx];
}
return item;
@@ -233,11 +246,11 @@ var _default = (0, _iterateJsdoc.default)(({
} = context.options[0] || {};
const checkTypesRegex = utils.getRegexFromString(checkTypesPattern);
const jsdocParameterNamesDeep = utils.getJsdocTagsDeep('param');
- if (!jsdocParameterNamesDeep.length) {
+ if (!jsdocParameterNamesDeep || !jsdocParameterNamesDeep.length) {
return;
}
const functionParameterNames = utils.getFunctionParameterNames(useDefaultObjectProperties);
- const targetTagName = utils.getPreferredTagName({
+ const targetTagName = /** @type {string} */utils.getPreferredTagName({
tagName: 'param'
});
const isError = validateParameterNames(targetTagName, allowExtraTrailingParamDocs, checkDestructured, checkRestProperty, checkTypesRegex, disableExtraPropertyReporting, enableFixer, functionParameterNames, jsdoc, utils, report);
@@ -249,7 +262,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Ensures that parameter names in JSDoc match those in the function declaration.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-check-param-names'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/check-param-names.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkPropertyNames.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkPropertyNames.js
index d33d1af4e1de51..b5c07e13f976ab 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkPropertyNames.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkPropertyNames.js
@@ -97,7 +97,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Ensures that property names in JSDoc are not duplicated on the same block and that nested properties have defined roots.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-check-property-names'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/check-property-names.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkSyntax.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkSyntax.js
index 13254ab7ccc892..620185b20cfd2f 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkSyntax.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkSyntax.js
@@ -29,7 +29,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Reports against syntax not valid for the mode (e.g., Google Closure Compiler in non-Closure mode).',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-check-syntax'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/check-syntax.md#repos-sticky-header'
},
type: 'suggestion'
}
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkTagNames.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkTagNames.js
index 84ca710dd2a0bf..cde8a87388d49f 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkTagNames.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkTagNames.js
@@ -4,8 +4,8 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
-var _escapeStringRegexp = _interopRequireDefault(require("escape-string-regexp"));
var _iterateJsdoc = _interopRequireDefault(require("../iterateJsdoc"));
+var _escapeStringRegexp = _interopRequireDefault(require("escape-string-regexp"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// https://babeljs.io/docs/en/babel-plugin-transform-react-jsx/
const jsxTagNames = new Set(['jsx', 'jsxFrag', 'jsxImportSource', 'jsxRuntime']);
@@ -58,9 +58,7 @@ var _default = (0, _iterateJsdoc.default)(({
utils.reportSettings('Invalid `settings.jsdoc.tagNamePreference`. Values must be falsy, a string, or an object.');
}
return preferredTag.replacement;
- }).filter(preferredType => {
- return preferredType;
- });
+ }).filter(Boolean);
}
/**
@@ -95,7 +93,6 @@ var _default = (0, _iterateJsdoc.default)(({
return true;
};
- /* eslint-disable jsdoc/no-undefined-types -- TS */
/**
* @param {string} message
* @param {import('comment-parser').Spec} jsdocTag
@@ -104,7 +101,6 @@ var _default = (0, _iterateJsdoc.default)(({
* @returns {void}
*/
const reportWithTagRemovalFixer = (message, jsdocTag, tagIndex, additionalTagChanges) => {
- /* eslint-enable jsdoc/no-undefined-types -- TS */
utils.reportJSDoc(message, jsdocTag, enableFixer ? () => {
if (jsdocTag.description.trim()) {
utils.changeTag(jsdocTag, {
@@ -188,7 +184,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Reports invalid block tag names.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-check-tag-names'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/check-tag-names.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkTypes.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkTypes.js
index ba68a317319fe8..cf281a27de398e 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkTypes.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkTypes.js
@@ -4,15 +4,14 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
-var _jsdoccomment = require("@es-joy/jsdoccomment");
var _iterateJsdoc = _interopRequireDefault(require("../iterateJsdoc"));
+var _jsdoccomment = require("@es-joy/jsdoccomment");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const strictNativeTypes = ['undefined', 'null', 'boolean', 'number', 'bigint', 'string', 'symbol', 'object', 'Array', 'Function', 'Date', 'RegExp'];
/**
* Adjusts the parent type node `meta` for generic matches (or type node
* `type` for `JsdocTypeAny`) and sets the type node `value`.
- *
* @param {string} type The actual type
* @param {string} preferred The preferred type
* @param {boolean} isGenericMatch
@@ -156,7 +155,6 @@ var _default = (0, _iterateJsdoc.default)(({
/**
* Gets information about the preferred type: whether there is a matching
* preferred type, what the type is, and whether it is a match to a generic.
- *
* @param {string} _type Not currently in use
* @param {string} typeNodeName
* @param {import('jsdoc-type-pratt-parser').NonRootResult|undefined} parentNode
@@ -203,7 +201,6 @@ var _default = (0, _iterateJsdoc.default)(({
/**
* Iterates strict types to see if any should be added to `invalidTypes` (and
* the the relevant strict type returned as the new preferred type).
- *
* @param {string} typeNodeName
* @param {string|undefined} preferred
* @param {import('jsdoc-type-pratt-parser').NonRootResult|undefined} parentNode
@@ -249,7 +246,6 @@ var _default = (0, _iterateJsdoc.default)(({
/**
* Collect invalid type info.
- *
* @param {string} type
* @param {string} value
* @param {string} tagName
@@ -358,7 +354,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Reports invalid types.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-check-types'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/check-types.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkValues.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkValues.js
index 49ef2217c96217..45d09a8ec25c11 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkValues.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/checkValues.js
@@ -4,9 +4,9 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
+var _iterateJsdoc = _interopRequireDefault(require("../iterateJsdoc"));
var _semver = _interopRequireDefault(require("semver"));
var _spdxExpressionParse = _interopRequireDefault(require("spdx-expression-parse"));
-var _iterateJsdoc = _interopRequireDefault(require("../iterateJsdoc"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const allowedKinds = new Set(['class', 'constant', 'event', 'external', 'file', 'function', 'member', 'mixin', 'module', 'namespace', 'typedef']);
var _default = (0, _iterateJsdoc.default)(({
@@ -96,7 +96,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'This rule checks the values for a handful of tags: `@version`, `@since`, `@license` and `@author`.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-check-values'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/check-values.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/emptyTags.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/emptyTags.js
index aebdf1b5b867b6..add49e810097d8 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/emptyTags.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/emptyTags.js
@@ -53,7 +53,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Expects specific tags to be empty of any content.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-empty-tags'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/empty-tags.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/implementsOnClasses.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/implementsOnClasses.js
index 68f0ed2c037eb8..3c9bae796b98a6 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/implementsOnClasses.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/implementsOnClasses.js
@@ -26,7 +26,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Reports an issue with any non-constructor function using `@implements`.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-implements-on-classes'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/implements-on-classes.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/importsAsDependencies.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/importsAsDependencies.js
new file mode 100644
index 00000000000000..a06666f395df68
--- /dev/null
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/importsAsDependencies.js
@@ -0,0 +1,110 @@
+"use strict";
+
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.default = void 0;
+var _iterateJsdoc = _interopRequireDefault(require("../iterateJsdoc"));
+var _jsdoccomment = require("@es-joy/jsdoccomment");
+var _fs = require("fs");
+var _isBuiltinModule = _interopRequireDefault(require("is-builtin-module"));
+var _path = require("path");
+function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+/**
+ * @type {Set|null}
+ */
+let deps;
+const setDeps = function () {
+ try {
+ const pkg = JSON.parse(
+ // @ts-expect-error It's ok
+ (0, _fs.readFileSync)((0, _path.join)(process.cwd(), './package.json')));
+ deps = new Set([...(pkg.dependencies ? Object.keys(pkg.dependencies) :
+ // istanbul ignore next
+ []), ...(pkg.devDependencies ? Object.keys(pkg.devDependencies) :
+ // istanbul ignore next
+ [])]);
+ } catch (error) {
+ // istanbul ignore next -- our package.json exists
+ deps = null;
+ /* eslint-disable no-console -- Inform user */
+ // istanbul ignore next -- our package.json exists
+ console.log(error);
+ /* eslint-enable no-console -- Inform user */
+ }
+};
+
+const moduleCheck = new Map();
+var _default = (0, _iterateJsdoc.default)(({
+ jsdoc,
+ settings,
+ utils
+}) => {
+ // istanbul ignore if
+ if (deps === undefined) {
+ setDeps();
+ }
+
+ // istanbul ignore if -- our package.json exists
+ if (deps === null) {
+ return;
+ }
+ const {
+ mode
+ } = settings;
+ for (const tag of jsdoc.tags) {
+ let typeAst;
+ try {
+ typeAst = mode === 'permissive' ? (0, _jsdoccomment.tryParse)(tag.type) : (0, _jsdoccomment.parse)(tag.type, mode);
+ } catch {
+ continue;
+ }
+
+ // eslint-disable-next-line no-loop-func -- Safe
+ (0, _jsdoccomment.traverse)(typeAst, nde => {
+ // istanbul ignore if -- TS guard
+ if (deps === null) {
+ return;
+ }
+ if (nde.type === 'JsdocTypeImport') {
+ let mod = nde.element.value.replace(/^(@[^/]+\/[^/]+|[^/]+).*$/u, '$1');
+ if (/^[./]/u.test(mod)) {
+ return;
+ }
+ if ((0, _isBuiltinModule.default)(mod)) {
+ // mod = '@types/node';
+ // moduleCheck.set(mod, !deps.has(mod));
+ return;
+ } else if (!moduleCheck.has(mod)) {
+ let pkg;
+ try {
+ pkg = JSON.parse(
+ // @ts-expect-error It's ok
+ (0, _fs.readFileSync)((0, _path.join)(process.cwd(), 'node_modules', mod, './package.json')));
+ } catch {
+ // Ignore
+ }
+ if (!pkg || !pkg.types && !pkg.typings) {
+ mod = `@types/${mod}`;
+ }
+ moduleCheck.set(mod, !deps.has(mod));
+ }
+ if (moduleCheck.get(mod)) {
+ utils.reportJSDoc('import points to package which is not found in dependencies', tag);
+ }
+ }
+ });
+ }
+}, {
+ iterateAllJsdocs: true,
+ meta: {
+ docs: {
+ description: 'Reports if JSDoc `import()` statements point to a package which is not listed in `dependencies` or `devDependencies`',
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/imports-as-dependencies.md#repos-sticky-header'
+ },
+ type: 'suggestion'
+ }
+});
+exports.default = _default;
+module.exports = exports.default;
+//# sourceMappingURL=importsAsDependencies.js.map
\ No newline at end of file
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/informativeDocs.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/informativeDocs.js
index 3e152c5f04926e..428571b2ed457e 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/informativeDocs.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/informativeDocs.js
@@ -4,8 +4,8 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
-var _areDocsInformative = require("are-docs-informative");
var _iterateJsdoc = _interopRequireDefault(require("../iterateJsdoc"));
+var _areDocsInformative = require("are-docs-informative");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const defaultAliases = {
a: ['an', 'our']
@@ -103,7 +103,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'This rule reports doc comments that only restate their attached name.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#informative-docs'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/informative-docs.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/matchDescription.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/matchDescription.js
index 19f864a388d75b..76588838c38b27 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/matchDescription.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/matchDescription.js
@@ -109,7 +109,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Enforces a regular expression pattern on descriptions.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-match-description'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/match-description.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/matchName.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/matchName.js
index 109b3efd65e75e..80859905badfa4 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/matchName.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/matchName.js
@@ -81,7 +81,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Reports the name portion of a JSDoc tag if matching or not matching a given regular expression.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-match-name'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/match-name.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/multilineBlocks.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/multilineBlocks.js
index 3758fdbccdc348..676fa115380eca 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/multilineBlocks.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/multilineBlocks.js
@@ -186,7 +186,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Controls how and whether jsdoc blocks can be expressed as single or multiple line blocks.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-multiline-blocks'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/multiline-blocks.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noBadBlocks.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noBadBlocks.js
index 3f119cabde1136..37871815d83305 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noBadBlocks.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noBadBlocks.js
@@ -4,8 +4,8 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
-var _commentParser = require("comment-parser");
var _iterateJsdoc = _interopRequireDefault(require("../iterateJsdoc"));
+var _commentParser = require("comment-parser");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Neither a single nor 3+ asterisks are valid jsdoc per
// https://jsdoc.app/about-getting-started.html#adding-documentation-comments-to-your-code
@@ -62,7 +62,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'This rule checks for multi-line-style comments which fail to meet the criteria of a jsdoc block.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-no-bad-blocks'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/no-bad-blocks.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noBlankBlockDescriptions.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noBlankBlockDescriptions.js
index fdcd0e073e6912..b35fe9df6eb219 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noBlankBlockDescriptions.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noBlankBlockDescriptions.js
@@ -52,7 +52,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Detects and removes extra lines of a blank block description',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-no-blank-block-descriptions'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/no-blank-block-descriptions.md#repos-sticky-header'
},
fixable: 'whitespace',
schema: [],
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noBlankBlocks.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noBlankBlocks.js
index 0ab713c0ec7e25..863485e1a8a982 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noBlankBlocks.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noBlankBlocks.js
@@ -34,7 +34,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Removes empty blocks with nothing but possibly line breaks',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-no-blank-blocks'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/no-blank-blocks.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noDefaults.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noDefaults.js
index 5f8eaaa3fb1d2a..f28c15279b5d0a 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noDefaults.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noDefaults.js
@@ -45,7 +45,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'This rule reports defaults being used on the relevant portion of `@param` or `@default`.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-no-defaults'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/no-defaults.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noMissingSyntax.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noMissingSyntax.js
index 0bef896ab46921..b864f46702e476 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noMissingSyntax.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noMissingSyntax.js
@@ -4,7 +4,6 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
-var _esquery = _interopRequireDefault(require("esquery"));
var _iterateJsdoc = _interopRequireDefault(require("../iterateJsdoc"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
@@ -46,12 +45,11 @@ const incrementSelector = (state, selector, comment) => {
};
var _default = (0, _iterateJsdoc.default)(({
context,
- node,
info: {
comment
},
- sourceCode,
- state
+ state,
+ utils
}) => {
if (!context.options[0]) {
// Handle error later
@@ -62,20 +60,9 @@ var _default = (0, _iterateJsdoc.default)(({
* @type {Context[]}
*/
const contexts = context.options[0].contexts;
- const foundContext = contexts.find(cntxt => {
- return typeof cntxt === 'string' ? _esquery.default.matches(node, _esquery.default.parse(cntxt), null,
- // https://github.com/DefinitelyTyped/DefinitelyTyped/pull/65460
- // @ts-expect-error
- {
- visitorKeys: sourceCode.visitorKeys
- }) : (!cntxt.context || cntxt.context === 'any' ||
- // https://github.com/DefinitelyTyped/DefinitelyTyped/pull/65460
- // @ts-expect-error
- _esquery.default.matches(node, _esquery.default.parse(cntxt.context), null, {
- visitorKeys: sourceCode.visitorKeys
- })) && comment === cntxt.comment;
- });
- const contextStr = typeof foundContext === 'object' ? foundContext.context ?? 'any' : String(foundContext);
+ const {
+ contextStr
+ } = utils.findContext(contexts, comment);
setDefaults(state);
incrementSelector(state, contextStr, String(comment));
}, {
@@ -147,7 +134,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Reports when certain comment structures are always expected.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-no-missing-syntax'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/no-missing-syntax.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noMultiAsterisks.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noMultiAsterisks.js
index ec2b9203f15fe2..be2d20b83b5d3a 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noMultiAsterisks.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noMultiAsterisks.js
@@ -74,7 +74,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: '',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-no-multi-asterisks'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/no-multi-asterisks.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noRestrictedSyntax.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noRestrictedSyntax.js
index 2db9a7503d4f92..e07995ae3656d0 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noRestrictedSyntax.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noRestrictedSyntax.js
@@ -4,17 +4,15 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
-var _esquery = _interopRequireDefault(require("esquery"));
var _iterateJsdoc = _interopRequireDefault(require("../iterateJsdoc"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var _default = (0, _iterateJsdoc.default)(({
- node,
context,
info: {
comment
},
- sourceCode,
- report
+ report,
+ utils
}) => {
if (!context.options.length) {
report('Rule `no-restricted-syntax` is missing a `contexts` option.');
@@ -23,32 +21,17 @@ var _default = (0, _iterateJsdoc.default)(({
const {
contexts
} = context.options[0];
- const foundContext = contexts.find(
- /**
- * @param {string|{context: string, comment: string}} cntxt
- * @returns {boolean}
- */
- cntxt => {
- return typeof cntxt === 'string' ? _esquery.default.matches(node, _esquery.default.parse(cntxt), null,
- // https://github.com/DefinitelyTyped/DefinitelyTyped/pull/65460
- // @ts-expect-error
- {
- visitorKeys: sourceCode.visitorKeys
- }) : (!cntxt.context || cntxt.context === 'any' || _esquery.default.matches(node, _esquery.default.parse(cntxt.context), null,
- // https://github.com/DefinitelyTyped/DefinitelyTyped/pull/65460
- // @ts-expect-error
- {
- visitorKeys: sourceCode.visitorKeys
- })) && comment === cntxt.comment;
- });
+ const {
+ foundContext,
+ contextStr
+ } = utils.findContext(contexts, comment);
// We are not on the *particular* matching context/comment, so don't assume
// we need reporting
if (!foundContext) {
return;
}
- const contextStr = typeof foundContext === 'object' ? foundContext.context ?? 'any' : foundContext;
- const message = (foundContext === null || foundContext === void 0 ? void 0 : foundContext.message) ?? 'Syntax is restricted: {{context}}' + (comment ? ' with {{comment}}' : '');
+ const message = /** @type {import('../iterateJsdoc.js').ContextObject} */(foundContext === null || foundContext === void 0 ? void 0 : foundContext.message) ?? 'Syntax is restricted: {{context}}' + (comment ? ' with {{comment}}' : '');
report(message, null, null, comment ? {
comment,
context: contextStr
@@ -60,7 +43,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Reports when certain comment structures are present.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-no-restricted-syntax'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/no-restricted-syntax.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noTypes.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noTypes.js
index 9cf8d11b7bce51..57f19c792d0d8b 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noTypes.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noTypes.js
@@ -36,7 +36,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'This rule reports types being used on `@param` or `@returns`.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-no-types'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/no-types.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noUndefinedTypes.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noUndefinedTypes.js
index c17cc4092e152a..aa45b0be08b001 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noUndefinedTypes.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/noUndefinedTypes.js
@@ -4,8 +4,8 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
-var _jsdoccomment = require("@es-joy/jsdoccomment");
var _iterateJsdoc = _interopRequireWildcard(require("../iterateJsdoc"));
+var _jsdoccomment = require("@es-joy/jsdoccomment");
function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
const extraTypes = ['null', 'undefined', 'void', 'string', 'boolean', 'object', 'function', 'symbol', 'number', 'bigint', 'NaN', 'Infinity', 'any', '*', 'never', 'unknown', 'const', 'this', 'true', 'false', 'Array', 'Object', 'RegExp', 'Date', 'Function'];
@@ -70,9 +70,7 @@ var _default = (0, _iterateJsdoc.default)(({
utils.reportSettings('Invalid `settings.jsdoc.preferredTypes`. Values must be falsy, a string, or an object.');
}
return stripPseudoTypes(preferredType.replacement);
- }).filter(preferredType => {
- return preferredType;
- });
+ }).filter(Boolean);
}
const typedefDeclarations = sourceCode.getAllComments().filter(comment => {
return /^\*\s/u.test(comment.value);
@@ -188,10 +186,9 @@ var _default = (0, _iterateJsdoc.default)(({
}) => {
return utils.isNamepathOrUrlReferencingTag(tag);
}).map(tagToParsedType('namepathOrURL'));
- const tagsWithTypes = /** @type {TypeAndTagInfo[]} */[...typeTags, ...namepathReferencingTags, ...namepathOrUrlReferencingTags].filter(result => {
- // Remove types which failed to parse
- return result;
- });
+ const tagsWithTypes = /** @type {TypeAndTagInfo[]} */[...typeTags, ...namepathReferencingTags, ...namepathOrUrlReferencingTags
+ // Remove types which failed to parse
+ ].filter(Boolean);
for (const {
tag,
parsedType
@@ -219,7 +216,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Checks that types in jsdoc comments are defined.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-no-undefined-types'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/no-undefined-types.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireAsteriskPrefix.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireAsteriskPrefix.js
index 0c2eb5372b0dc4..7c25b2fce7b68e 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireAsteriskPrefix.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireAsteriskPrefix.js
@@ -122,6 +122,10 @@ var _default = (0, _iterateJsdoc.default)(({
}, {
iterateAllJsdocs: true,
meta: {
+ docs: {
+ description: 'Requires that each JSDoc line starts with an `*`.',
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-asterisk-prefix.md#repos-sticky-header'
+ },
fixable: 'code',
schema: [{
enum: ['always', 'never', 'any'],
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireDescription.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireDescription.js
index acb46dd0ee1dc6..d234d213f42277 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireDescription.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireDescription.js
@@ -79,7 +79,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires that all functions have a description.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-description'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-description.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireDescriptionCompleteSentence.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireDescriptionCompleteSentence.js
index 30150acaa92cac..e4a275318f23a7 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireDescriptionCompleteSentence.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireDescriptionCompleteSentence.js
@@ -4,8 +4,8 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
-var _escapeStringRegexp = _interopRequireDefault(require("escape-string-regexp"));
var _iterateJsdoc = _interopRequireDefault(require("../iterateJsdoc"));
+var _escapeStringRegexp = _interopRequireDefault(require("escape-string-regexp"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const otherDescriptiveTags = new Set([
// 'copyright' and 'see' might be good addition, but as the former may be
@@ -29,7 +29,7 @@ const extractParagraphs = text => {
const extractSentences = (text, abbreviationsRegex) => {
const txt = text
// Remove all {} tags.
- .replace(/\{[\s\S]*?\}\s*/gu, '')
+ .replaceAll(/\{[\s\S]*?\}\s*/gu, '')
// Remove custom abbreviations
.replace(abbreviationsRegex, '');
@@ -191,7 +191,7 @@ var _default = (0, _iterateJsdoc.default)(({
newlineBeforeCapsAssumesBadSentenceEnd = false
} = context.options[0] || {};
const abbreviationsRegex = abbreviations.length ? new RegExp('\\b' + abbreviations.map(abbreviation => {
- return (0, _escapeStringRegexp.default)(abbreviation.replace(/\.$/ug, '') + '.');
+ return (0, _escapeStringRegexp.default)(abbreviation.replaceAll(/\.$/ug, '') + '.');
}).join('|') + '(?:$|\\s)', 'gu') : '';
let {
description
@@ -250,7 +250,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires that block description, explicit `@description`, and `@param`/`@returns` tag descriptions are written in complete sentences.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-description-complete-sentence'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-description-complete-sentence.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireExample.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireExample.js
index 1982512f337985..a6bba658c9af06 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireExample.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireExample.js
@@ -47,7 +47,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires that all functions have examples.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-example'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-example.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireFileOverview.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireFileOverview.js
index cb2e43b8895cbd..d944e0e672e096 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireFileOverview.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireFileOverview.js
@@ -92,7 +92,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Checks that all files have one `@file`, `@fileoverview`, or `@overview` tag at the beginning of the file.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-file-overview'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-file-overview.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireHyphenBeforeParamDescription.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireHyphenBeforeParamDescription.js
index 156e2a38222dce..516b6afd55b737 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireHyphenBeforeParamDescription.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireHyphenBeforeParamDescription.js
@@ -17,8 +17,6 @@ var _default = (0, _iterateJsdoc.default)(({
const [mainCircumstance, {
tags = null
} = {}] = context.options;
-
- /* eslint-disable jsdoc/valid-types -- Old version */
const tgs =
/**
* @type {null|"any"|{[key: string]: "always"|"never"}}
@@ -27,9 +25,7 @@ var _default = (0, _iterateJsdoc.default)(({
/* eslint-enable jsdoc/valid-types -- Old version */
/**
- * @param {import('comment-parser').Spec & {
- * line: import('../iterateJsdoc.js').Integer
- * }} jsdocTag
+ * @param {import('@es-joy/jsdoccomment').JsdocTagWithInline} jsdocTag
* @param {string} targetTagName
* @param {"always"|"never"} [circumstance]
* @returns {void}
@@ -44,7 +40,8 @@ var _default = (0, _iterateJsdoc.default)(({
if (always) {
if (!startsWithHyphen) {
report(`There must be a hyphen before @${targetTagName} description.`, fixer => {
- const lineIndex = jsdocTag.line;
+ const lineIndex = /** @type {import('../iterateJsdoc.js').Integer} */
+ jsdocTag.line;
const sourceLines = sourceCode.getText(jsdocNode).split('\n');
// Get start index of description, accounting for multi-line descriptions
@@ -112,7 +109,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires a hyphen before the `@param` description.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-hyphen-before-param-description'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-hyphen-before-param-description.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireJsdoc.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireJsdoc.js
index b4bbdffb6226ae..e37c21140390b1 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireJsdoc.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireJsdoc.js
@@ -4,11 +4,19 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
-var _jsdoccomment = require("@es-joy/jsdoccomment");
var _exportParser = _interopRequireDefault(require("../exportParser"));
var _iterateJsdoc = require("../iterateJsdoc");
var _jsdocUtils = _interopRequireDefault(require("../jsdocUtils"));
+var _jsdoccomment = require("@es-joy/jsdoccomment");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
+/**
+ * @typedef {{
+ * ancestorsOnly: boolean,
+ * esm: boolean,
+ * initModuleExports: boolean,
+ * initWindow: boolean
+ * }} RequireJsdocOpts
+ */
/** @type {import('json-schema').JSONSchema4} */
const OPTIONS_SCHEMA = {
additionalProperties: false,
@@ -149,14 +157,11 @@ const getOption = (context, baseObject, option, key) => {
typeof context.options[0][option] === 'boolean' || key in context.options[0][option])) {
return context.options[0][option][key];
}
-
- /* eslint-disable jsdoc/valid-types -- Old version */
return (/** @type {{[key: string]: {default?: boolean|undefined}}} */baseObject.properties[key].default
);
/* eslint-enable jsdoc/valid-types -- Old version */
};
-/* eslint-disable jsdoc/valid-types -- Old version */
/**
* @param {import('eslint').Rule.RuleContext} context
* @param {import('../iterateJsdoc.js').Settings} settings
@@ -198,7 +203,6 @@ const getOptions = (context, settings) => {
return false;
}
- /* eslint-disable jsdoc/valid-types -- Old version */
/** @type {{[key: string]: boolean|undefined}} */
const properties = {};
/* eslint-enable jsdoc/valid-types -- Old version */
@@ -213,7 +217,6 @@ const getOptions = (context, settings) => {
/** @type {import('json-schema').JSONSchema4Object} */
OPTIONS_SCHEMA.properties.publicOnly.oneOf[1]),
require: (baseObj => {
- /* eslint-disable jsdoc/valid-types -- Old version */
/** @type {{[key: string]: boolean|undefined}} */
const properties = {};
/* eslint-enable jsdoc/valid-types -- Old version */
@@ -247,7 +250,7 @@ var _default = {
fixerMessage,
minLineCount
} = opts;
- const publicOnly = /* eslint-disable jsdoc/valid-types -- Old version */
+ const publicOnly =
/**
* @type {{
* [key: string]: boolean | undefined;
@@ -313,6 +316,7 @@ var _default = {
// setters or getters) being reported
if (_jsdocUtils.default.exemptSpeciaMethods({
description: '',
+ inlineTags: [],
problems: [],
source: [],
tags: []
@@ -386,6 +390,7 @@ var _default = {
});
};
if (publicOnly) {
+ /** @type {RequireJsdocOpts} */
const opt = {
ancestorsOnly: Boolean((publicOnly === null || publicOnly === void 0 ? void 0 : publicOnly.ancestorsOnly) ?? false),
esm: Boolean((publicOnly === null || publicOnly === void 0 ? void 0 : publicOnly.esm) ?? true),
@@ -485,7 +490,7 @@ var _default = {
category: 'Stylistic Issues',
description: 'Require JSDoc comments',
recommended: true,
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-jsdoc'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-jsdoc.md#repos-sticky-header'
},
fixable: 'code',
messages: {
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireParam.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireParam.js
index 3925ce4d6ba91a..f5e6f903d61f64 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireParam.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireParam.js
@@ -7,12 +7,15 @@ exports.default = void 0;
var _iterateJsdoc = _interopRequireDefault(require("../iterateJsdoc"));
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
- * @template T
+ * @typedef {[string, boolean, () => RootNamerReturn]} RootNamerReturn
+ */
+/**
* @param {string[]} desiredRoots
* @param {number} currentIndex
- * @returns {[string, boolean, () => T]}
+ * @returns {RootNamerReturn}
*/
const rootNamer = (desiredRoots, currentIndex) => {
+ /** @type {string} */
let name;
let idx = currentIndex;
const incremented = desiredRoots.length <= 1;
@@ -21,7 +24,7 @@ const rootNamer = (desiredRoots, currentIndex) => {
const suffix = idx++;
name = `${base}${suffix}`;
} else {
- name = desiredRoots.shift();
+ name = /** @type {string} */desiredRoots.shift();
}
return [name, incremented, () => {
return rootNamer(desiredRoots, idx);
@@ -65,7 +68,15 @@ var _default = (0, _iterateJsdoc.default)(({
if (!functionParameterNames.length) {
return;
}
- const jsdocParameterNames = utils.getJsdocTagsDeep(preferredTagName);
+ const jsdocParameterNames =
+ /**
+ * @type {{
+ * idx: import('../iterateJsdoc.js').Integer;
+ * name: string;
+ * type: string;
+ * }[]}
+ */
+ utils.getJsdocTagsDeep(preferredTagName);
const shallowJsdocParameterNames = jsdocParameterNames.filter(tag => {
return !tag.name.includes('.');
}).map((tag, idx) => {
@@ -75,17 +86,38 @@ var _default = (0, _iterateJsdoc.default)(({
};
});
const checkTypesRegex = utils.getRegexFromString(checkTypesPattern);
+
+ /**
+ * @type {{
+ * functionParameterIdx: import('../iterateJsdoc.js').Integer,
+ * functionParameterName: string,
+ * inc: boolean|undefined,
+ * remove?: true,
+ * type?: string|undefined
+ * }[]}
+ */
const missingTags = [];
const flattenedRoots = utils.flattenRoots(functionParameterNames).names;
+
+ /**
+ * @type {{
+ * [key: string]: import('../iterateJsdoc.js').Integer
+ * }}
+ */
const paramIndex = {};
+
+ /**
+ * @param {string} cur
+ * @returns {boolean}
+ */
const hasParamIndex = cur => {
return utils.dropPathSegmentQuotes(String(cur)) in paramIndex;
};
/**
*
- * @param {} cur
- * @returns {}
+ * @param {string|number|undefined} cur
+ * @returns {import('../iterateJsdoc.js').Integer}
*/
const getParamIndex = cur => {
return paramIndex[utils.dropPathSegmentQuotes(String(cur))];
@@ -93,8 +125,8 @@ var _default = (0, _iterateJsdoc.default)(({
/**
*
- * @param {} cur
- * @param {} idx
+ * @param {string} cur
+ * @param {import('../iterateJsdoc.js').Integer} idx
* @returns {void}
*/
const setParamIndex = (cur, idx) => {
@@ -106,8 +138,10 @@ var _default = (0, _iterateJsdoc.default)(({
/**
*
- * @param {} jsdocTags
- * @param {} indexAtFunctionParams
+ * @param {(import('@es-joy/jsdoccomment').JsdocTagWithInline & {
+ * newAdd?: boolean
+ * })[]} jsdocTags
+ * @param {import('../iterateJsdoc.js').Integer} indexAtFunctionParams
* @returns {import('../iterateJsdoc.js').Integer}
*/
const findExpectedIndex = (jsdocTags, indexAtFunctionParams) => {
@@ -118,7 +152,14 @@ var _default = (0, _iterateJsdoc.default)(({
}) => {
return !newAdd && remainingRoots.some(remainingRoot => {
if (Array.isArray(remainingRoot)) {
- return remainingRoot[1].names.includes(name);
+ return (
+ /**
+ * @type {import('../jsdocUtils.js').FlattendRootInfo & {
+ * annotationParamName?: string|undefined;
+ * }}
+ */
+ remainingRoot[1].names.includes(name)
+ );
}
if (typeof remainingRoot === 'object') {
return name === remainingRoot.name;
@@ -152,6 +193,8 @@ var _default = (0, _iterateJsdoc.default)(({
let inc;
if (Array.isArray(functionParameterName)) {
const matchedJsdoc = shallowJsdocParameterNames[functionParameterIdx] || jsdocParameterNames[functionParameterIdx];
+
+ /** @type {string} */
let rootName;
if (functionParameterName[0]) {
rootName = functionParameterName[0];
@@ -170,7 +213,13 @@ var _default = (0, _iterateJsdoc.default)(({
hasPropertyRest,
rests,
names
- } = functionParameterName[1];
+ } =
+ /**
+ * @type {import('../jsdocUtils.js').FlattendRootInfo & {
+ * annotationParamName?: string | undefined;
+ * }}
+ */
+ functionParameterName[1];
const notCheckingNames = [];
if (!enableRestElementFixer && hasRestElement) {
continue;
@@ -246,16 +295,18 @@ var _default = (0, _iterateJsdoc.default)(({
}
continue;
}
+
+ /** @type {string} */
let funcParamName;
let type;
if (typeof functionParameterName === 'object') {
if (!enableRestElementFixer && functionParameterName.restElement) {
continue;
}
- funcParamName = functionParameterName.name;
+ funcParamName = /** @type {string} */functionParameterName.name;
type = '{...any}';
} else {
- funcParamName = functionParameterName;
+ funcParamName = /** @type {string} */functionParameterName;
}
if (jsdocParameterNames && !jsdocParameterNames.find(({
name
@@ -276,9 +327,9 @@ var _default = (0, _iterateJsdoc.default)(({
* @param {{
* functionParameterIdx: import('../iterateJsdoc.js').Integer,
* functionParameterName: string,
- * remove: true,
- * inc: boolean,
- * type: string
+ * remove?: true,
+ * inc?: boolean,
+ * type?: string
* }} cfg
*/
const fix = ({
@@ -294,15 +345,16 @@ var _default = (0, _iterateJsdoc.default)(({
/**
*
- * @param {} tagIndex
- * @param {} sourceIndex
- * @param {} spliceCount
- * @returns {}
+ * @param {import('../iterateJsdoc.js').Integer} tagIndex
+ * @param {import('../iterateJsdoc.js').Integer} sourceIndex
+ * @param {import('../iterateJsdoc.js').Integer} spliceCount
+ * @returns {void}
*/
const createTokens = (tagIndex, sourceIndex, spliceCount) => {
// console.log(sourceIndex, tagIndex, jsdoc.tags, jsdoc.source);
const tokens = {
number: sourceIndex + 1,
+ source: '',
tokens: {
delimiter: '*',
description: '',
@@ -319,9 +371,19 @@ var _default = (0, _iterateJsdoc.default)(({
type: type ?? ''
}
};
+
+ /**
+ * @type {(import('@es-joy/jsdoccomment').JsdocTagWithInline & {
+ * newAdd?: true
+ * })[]}
+ */
jsdoc.tags.splice(tagIndex, spliceCount, {
+ description: '',
+ inlineTags: [],
name: functionParameterName,
newAdd: true,
+ optional: false,
+ problems: [],
source: [tokens],
tag: preferredTagName,
type: type ?? ''
@@ -369,7 +431,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires that all function parameters are documented.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-param'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-param.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireParamDescription.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireParamDescription.js
index 09c4febb5e6f9d..38cca5b75b2d0c 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireParamDescription.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireParamDescription.js
@@ -43,7 +43,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires that each `@param` tag has a `description` value.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-param-description'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-param-description.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireParamName.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireParamName.js
index 309a4a0a5edc84..c86b745c9531ff 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireParamName.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireParamName.js
@@ -20,7 +20,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires that all function parameters have names.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-param-name'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-param-name.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireParamType.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireParamType.js
index e52938573b6f41..cec0bfd0caad18 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireParamType.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireParamType.js
@@ -43,7 +43,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires that each `@param` tag has a `type` value.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-param-type'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-param-type.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireProperty.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireProperty.js
index 281fb722ae2057..6a3b59303fe259 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireProperty.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireProperty.js
@@ -36,7 +36,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires that all `@typedef` and `@namespace` tags have `@property` when their type is a plain `object`, `Object`, or `PlainObject`.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-property'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-property.md#repos-sticky-header'
},
fixable: 'code',
type: 'suggestion'
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requirePropertyDescription.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requirePropertyDescription.js
index 365fe456d1c2c3..d14345d4920f4b 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requirePropertyDescription.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requirePropertyDescription.js
@@ -20,7 +20,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires that each `@property` tag has a `description` value.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-property-description'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-property-description.md#repos-sticky-header'
},
type: 'suggestion'
}
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requirePropertyName.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requirePropertyName.js
index 5b4e2a0a3c6be6..f9d9f7a934be31 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requirePropertyName.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requirePropertyName.js
@@ -20,7 +20,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires that all function `@property` tags have names.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-property-name'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-property-name.md#repos-sticky-header'
},
type: 'suggestion'
}
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requirePropertyType.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requirePropertyType.js
index ffc58f9df36ba5..55b1e81da66d35 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requirePropertyType.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requirePropertyType.js
@@ -20,7 +20,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires that each `@property` tag has a `type` value.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-property-type'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-property-type.md#repos-sticky-header'
},
type: 'suggestion'
}
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireReturns.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireReturns.js
index 3dc3527c876ae3..2ec773a932046d 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireReturns.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireReturns.js
@@ -11,7 +11,6 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
* or the method is either a constructor or an abstract method.
*
* In either of these cases the return value is optional or not defined.
- *
* @param {import('../iterateJsdoc.js').Utils} utils
* a reference to the utils which are used to probe if a tag is present or not.
* @returns {boolean}
@@ -34,11 +33,15 @@ const canSkip = utils => {
'interface']) || utils.avoidDocs();
};
var _default = (0, _iterateJsdoc.default)(({
+ info: {
+ comment
+ },
report,
utils,
context
}) => {
const {
+ contexts,
forceRequireReturn = false,
forceReturnsWithAsync = false
} = context.options[0] || {};
@@ -48,6 +51,17 @@ var _default = (0, _iterateJsdoc.default)(({
if (canSkip(utils)) {
return;
}
+
+ /** @type {boolean|undefined} */
+ let forceRequireReturnContext;
+ if (contexts) {
+ const {
+ foundContext
+ } = utils.findContext(contexts, comment);
+ if (typeof foundContext === 'object') {
+ forceRequireReturnContext = foundContext.forceRequireReturn;
+ }
+ }
const tagName = /** @type {string} */utils.getPreferredTagName({
tagName: 'returns'
});
@@ -67,7 +81,7 @@ var _default = (0, _iterateJsdoc.default)(({
if (!missingReturnTag) {
return false;
}
- if (forceRequireReturn && (iteratingFunction || utils.isVirtualFunction())) {
+ if ((forceRequireReturn || forceRequireReturnContext) && (iteratingFunction || utils.isVirtualFunction())) {
return true;
}
const isAsync = !iteratingFunction && utils.hasTag('async') || iteratingFunction && utils.isAsync();
@@ -84,7 +98,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires that returns are documented.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-returns'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-returns.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
@@ -109,6 +123,9 @@ var _default = (0, _iterateJsdoc.default)(({
},
context: {
type: 'string'
+ },
+ forceRequireReturn: {
+ type: 'boolean'
}
},
type: 'object'
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireReturnsCheck.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireReturnsCheck.js
index cb2813220f20e8..dc7b751f623b6b 100755
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireReturnsCheck.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireReturnsCheck.js
@@ -83,7 +83,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires a return statement in function body if a `@returns` tag is specified in jsdoc comment.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-returns-check'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-returns-check.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireReturnsDescription.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireReturnsDescription.js
index 4bd63619601aa2..81acf73fd729a5 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireReturnsDescription.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireReturnsDescription.js
@@ -24,7 +24,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires that the `@returns` tag has a `description` value.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-returns-description'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-returns-description.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireReturnsType.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireReturnsType.js
index f71511293714c2..af1d7997ade966 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireReturnsType.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireReturnsType.js
@@ -20,7 +20,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires that `@returns` tag has `type` value.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-returns-type'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-returns-type.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireThrows.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireThrows.js
index 37e5309727f80f..a49464faccf10b 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireThrows.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireThrows.js
@@ -9,7 +9,6 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
/**
* We can skip checking for a throws value, in case the documentation is inherited
* or the method is either a constructor or an abstract method.
- *
* @param {import('../iterateJsdoc.js').Utils} utils a reference to the utils which are used to probe if a tag is present or not.
* @returns {boolean} true in case deep checking can be skipped; otherwise false.
*/
@@ -62,7 +61,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires that throw statements are documented.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-throws'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-throws.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireYields.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireYields.js
index d4e420e14e2f8c..915745859ad99f 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireYields.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireYields.js
@@ -11,7 +11,6 @@ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { de
* or the method has a constructor or abstract tag.
*
* In either of these cases the yield value is optional or not defined.
- *
* @param {import('../iterateJsdoc.js').Utils} utils a reference to the utils which are used to probe if a tag is present or not.
* @returns {boolean} true in case deep checking can be skipped; otherwise false.
*/
@@ -116,7 +115,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires yields are documented.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-yields'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-yields.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireYieldsCheck.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireYieldsCheck.js
index a6315d2e9b469a..92060405d86943 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireYieldsCheck.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/requireYieldsCheck.js
@@ -113,7 +113,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires a yield statement in function body if a `@yields` tag is specified in jsdoc comment.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-require-yields-check'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/require-yields-check.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/sortTags.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/sortTags.js
index da66ac089b10d1..da6939dd43205e 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/sortTags.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/sortTags.js
@@ -42,7 +42,7 @@ var _default = (0, _iterateJsdoc.default)(({
for (const [idx, tag] of
/**
* @type {(
- * import('comment-parser').Spec & {
+ * import('@es-joy/jsdoccomment').JsdocTagWithInline & {
* originalIndex: import('../iterateJsdoc.js').Integer,
* originalLine: import('../iterateJsdoc.js').Integer,
* }
@@ -408,7 +408,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Sorts tags by a specified sequence according to tag name.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-sort-tags'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/sort-tags.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/tagLines.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/tagLines.js
index 44fd423661e2f4..291bad8e2a806f 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/tagLines.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/tagLines.js
@@ -218,7 +218,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Enforces lines (or no lines) between tags.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-tag-lines'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/tag-lines.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/textEscaping.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/textEscaping.js
index b5810ec403078a..7714c5f8460ee8 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/textEscaping.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/textEscaping.js
@@ -16,7 +16,7 @@ const markdownRegex = /(? {
- return desc.replace(new RegExp(htmlRegex, 'gu'), _ => {
+ return desc.replaceAll(new RegExp(htmlRegex, 'gu'), _ => {
if (_ === '<') {
return '<';
}
@@ -29,7 +29,7 @@ const htmlReplacer = desc => {
* @returns {string}
*/
const markdownReplacer = desc => {
- return desc.replace(new RegExp(markdownRegex, 'gu'), (_, backticks, encapsed) => {
+ return desc.replaceAll(new RegExp(markdownRegex, 'gu'), (_, backticks, encapsed) => {
const bookend = '`'.repeat(backticks.length);
return `\\${bookend}${encapsed}${bookend}`;
});
@@ -108,7 +108,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: '',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-text-escaping'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/text-escaping.md#repos-sticky-header'
},
fixable: 'code',
schema: [{
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/validTypes.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/validTypes.js
index 215792f1a9c2de..ad0f6a015e3580 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/validTypes.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/rules/validTypes.js
@@ -4,8 +4,8 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
-var _jsdoccomment = require("@es-joy/jsdoccomment");
var _iterateJsdoc = _interopRequireDefault(require("../iterateJsdoc"));
+var _jsdoccomment = require("@es-joy/jsdoccomment");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
const asExpression = /as\s+/u;
const suppressTypes = new Set([
@@ -229,7 +229,7 @@ var _default = (0, _iterateJsdoc.default)(({
meta: {
docs: {
description: 'Requires all types to be valid JSDoc or Closure compiler types without syntax errors.',
- url: 'https://github.com/gajus/eslint-plugin-jsdoc#eslint-plugin-jsdoc-rules-valid-types'
+ url: 'https://github.com/gajus/eslint-plugin-jsdoc/blob/main/docs/rules/valid-types.md#repos-sticky-header'
},
schema: [{
additionalProperties: false,
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/tagNames.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/tagNames.js
index e4abbc1bbb3060..2064fd87ae6da6 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/tagNames.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/tagNames.js
@@ -4,13 +4,11 @@ Object.defineProperty(exports, "__esModule", {
value: true
});
exports.typeScriptTags = exports.jsdocTags = exports.closureTags = void 0;
-/* eslint-disable jsdoc/valid-types -- Old version */
/**
* @typedef {{
* [key: string]: string[]
* }} AliasedTags
*/
-/* eslint-enable jsdoc/valid-types -- Old version */
/**
* @type {AliasedTags}
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/utils/hasReturnValue.js b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/utils/hasReturnValue.js
index 4fd2e3c31d23ed..6b3d563d0471dd 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/utils/hasReturnValue.js
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/dist/utils/hasReturnValue.js
@@ -12,7 +12,6 @@ exports.hasValueOrExecutorHasNonEmptyResolveValue = exports.hasReturnValue = voi
/**
* Checks if a node is a promise but has no resolve value or an empty value.
* An `undefined` resolve does not count.
- *
* @param {ESTreeOrTypeScriptNode|undefined|null} node
* @returns {boolean|undefined|null}
*/
@@ -33,7 +32,6 @@ const undefinedKeywords = new Set(['TSVoidKeyword', 'TSUndefinedKeyword', 'TSNev
/**
* Checks if a node has a return statement. Void return does not count.
- *
* @param {ESTreeOrTypeScriptNode|undefined|null} node
* @param {boolean} [throwOnNullReturn]
* @param {PromiseFilter} [promFilter]
@@ -118,7 +116,6 @@ const hasReturnValue = (node, throwOnNullReturn, promFilter) => {
/**
* Checks if a node has a return statement. Void return does not count.
- *
* @param {ESTreeOrTypeScriptNode|null|undefined} node
* @param {PromiseFilter} promFilter
* @returns {undefined|boolean|ESTreeOrTypeScriptNode}
@@ -249,7 +246,6 @@ const allBrancheshaveReturnValues = (node, promFilter) => {
* This could check for redeclaration of the resolver, but as such is
* unlikely, we avoid the performance cost of checking everywhere for
* (re)declarations or assignments.
- *
* @param {import('@typescript-eslint/types').TSESTree.Node|null|undefined} node
* @param {string} resolverName
* @returns {boolean}
@@ -433,7 +429,6 @@ const hasNonEmptyResolverCall = (node, resolverName) => {
/**
* Checks if a Promise executor has no resolve value or an empty value.
* An `undefined` resolve does not count.
- *
* @param {ESTreeOrTypeScriptNode} node
* @param {boolean} anyPromiseAsReturn
* @param {boolean} [allBranches]
diff --git a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/package.json b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/package.json
index b974a0b4c7b5a6..b0fff02d7e513c 100644
--- a/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/package.json
+++ b/tools/node_modules/eslint/node_modules/eslint-plugin-jsdoc/package.json
@@ -5,12 +5,13 @@
"url": "http://gajus.com"
},
"dependencies": {
- "@es-joy/jsdoccomment": "~0.39.3",
+ "@es-joy/jsdoccomment": "~0.39.4",
"are-docs-informative": "^0.0.2",
"comment-parser": "1.3.1",
"debug": "^4.3.4",
"escape-string-regexp": "^4.0.0",
"esquery": "^1.5.0",
+ "is-builtin-module": "^3.2.1",
"semver": "^7.5.1",
"spdx-expression-parse": "^3.0.1"
},
@@ -24,6 +25,7 @@
"@babel/plugin-transform-flow-strip-types": "^7.21.0",
"@babel/preset-env": "^7.21.5",
"@babel/register": "^7.21.0",
+ "@es-joy/escodegen": "^3.5.1",
"@es-joy/jsdoc-eslint-parser": "^0.19.0",
"@hkdobrev/run-if-changed": "^0.3.1",
"@semantic-release/commit-analyzer": "^9.0.2",
@@ -32,24 +34,25 @@
"@types/chai": "^4.3.5",
"@types/debug": "^4.1.7",
"@types/eslint": "^8.37.0",
- "@types/esquery": "^1.0.2",
+ "@types/esquery": "^1.5.0",
"@types/estree": "^1.0.1",
"@types/lodash.defaultsdeep": "^4.6.7",
"@types/mocha": "^10.0.1",
- "@types/node": "^20.1.4",
+ "@types/node": "^20.2.5",
"@types/semver": "^7.5.0",
"@types/spdx-expression-parse": "^3.0.2",
- "@typescript-eslint/parser": "^5.59.5",
+ "@typescript-eslint/parser": "^5.59.6",
"babel-plugin-add-module-exports": "^1.0.4",
"babel-plugin-istanbul": "^6.1.1",
"camelcase": "^6.3.0",
"chai": "^4.3.7",
"cross-env": "^7.0.3",
"decamelize": "^5.0.1",
- "eslint": "8.39.0",
- "eslint-config-canonical": "~33.0.1",
+ "eslint": "8.41.0",
+ "eslint-config-canonical": "~41.0.4",
+ "espree": "^9.5.2",
"gitdown": "^3.1.5",
- "glob": "^10.2.3",
+ "glob": "^10.2.6",
"husky": "^8.0.3",
"jsdoc-type-pratt-parser": "^4.0.0",
"lint-staged": "^13.2.2",
@@ -57,7 +60,7 @@
"mocha": "^10.2.0",
"nyc": "^15.1.0",
"open-editor": "^3.0.0",
- "rimraf": "^5.0.0",
+ "rimraf": "^5.0.1",
"semantic-release": "^21.0.2",
"typescript": "^5.0.4"
},
@@ -120,8 +123,9 @@
"tsc": "tsc",
"build": "rimraf ./dist && cross-env NODE_ENV=production babel ./src --out-dir ./dist --copy-files --source-maps --ignore ./src/bin/*.js --no-copy-ignored",
"check-docs": "babel-node ./src/bin/generateDocs.js --check",
- "create-docs": "babel-node ./src/bin/generateDocs.js",
+ "create-docs": "npm run create-options && babel-node ./src/bin/generateDocs.js",
"create-rule": "babel-node ./src/bin/generateRule.js",
+ "create-options": "node ./src/bin/generateOptions.mjs",
"install-offline": "pnpm install --prefer-offline --no-audit",
"lint": "npm run lint-arg -- .",
"lint-arg": "eslint --report-unused-disable-directives",
@@ -132,5 +136,5 @@
"test-cov": "cross-env TIMING=1 nyc --reporter text npm run test-no-cov",
"test-index": "npm run test-no-cov -- test/rules/index.js"
},
- "version": "44.2.4"
+ "version": "46.2.6"
}
diff --git a/tools/node_modules/eslint/node_modules/is-builtin-module/index.js b/tools/node_modules/eslint/node_modules/is-builtin-module/index.js
new file mode 100644
index 00000000000000..e79a925c3d3d26
--- /dev/null
+++ b/tools/node_modules/eslint/node_modules/is-builtin-module/index.js
@@ -0,0 +1,22 @@
+'use strict';
+const builtinModules = require('builtin-modules');
+
+const moduleSet = new Set(builtinModules);
+const NODE_PROTOCOL = 'node:';
+
+module.exports = moduleName => {
+ if (typeof moduleName !== 'string') {
+ throw new TypeError('Expected a string');
+ }
+
+ if (moduleName.startsWith(NODE_PROTOCOL)) {
+ moduleName = moduleName.slice(NODE_PROTOCOL.length);
+ }
+
+ const slashIndex = moduleName.indexOf('/');
+ if (slashIndex !== -1 && slashIndex !== moduleName.length - 1) {
+ moduleName = moduleName.slice(0, slashIndex);
+ }
+
+ return moduleSet.has(moduleName);
+};
diff --git a/deps/npm/node_modules/has-flag/license b/tools/node_modules/eslint/node_modules/is-builtin-module/license
similarity index 100%
rename from deps/npm/node_modules/has-flag/license
rename to tools/node_modules/eslint/node_modules/is-builtin-module/license
diff --git a/tools/node_modules/eslint/node_modules/is-builtin-module/package.json b/tools/node_modules/eslint/node_modules/is-builtin-module/package.json
new file mode 100644
index 00000000000000..866e5baffdf205
--- /dev/null
+++ b/tools/node_modules/eslint/node_modules/is-builtin-module/package.json
@@ -0,0 +1,47 @@
+{
+ "name": "is-builtin-module",
+ "version": "3.2.1",
+ "description": "Check if a string matches the name of a Node.js builtin module",
+ "license": "MIT",
+ "repository": "sindresorhus/is-builtin-module",
+ "funding": "https://github.com/sponsors/sindresorhus",
+ "author": {
+ "name": "Sindre Sorhus",
+ "email": "sindresorhus@gmail.com",
+ "url": "https://sindresorhus.com"
+ },
+ "engines": {
+ "node": ">=6"
+ },
+ "scripts": {
+ "test": "xo && ava && tsd"
+ },
+ "files": [
+ "index.js",
+ "index.d.ts"
+ ],
+ "keywords": [
+ "builtin",
+ "built-in",
+ "builtins",
+ "node",
+ "modules",
+ "core",
+ "bundled",
+ "list",
+ "array",
+ "names",
+ "is",
+ "detect",
+ "check",
+ "match"
+ ],
+ "dependencies": {
+ "builtin-modules": "^3.3.0"
+ },
+ "devDependencies": {
+ "ava": "^0.25.0",
+ "tsd": "^0.7.2",
+ "xo": "^0.23.0"
+ }
+}
diff --git a/tools/node_modules/eslint/node_modules/is-builtin-module/readme.md b/tools/node_modules/eslint/node_modules/is-builtin-module/readme.md
new file mode 100644
index 00000000000000..8304ebdf022d6f
--- /dev/null
+++ b/tools/node_modules/eslint/node_modules/is-builtin-module/readme.md
@@ -0,0 +1,47 @@
+# is-builtin-module
+
+> Check if a string matches the name of a Node.js builtin module
+
+
+## Install
+
+```
+$ npm install is-builtin-module
+```
+
+
+## Usage
+
+```js
+const isBuiltinModule = require('is-builtin-module');
+
+isBuiltinModule('fs');
+//=> true
+
+isBuiltinModule('fs/promises');
+//=> true
+
+isBuiltinModule('node:fs/promises');
+//=> true
+
+isBuiltinModule('unicorn');
+//=> false
+```
+
+
+## Related
+
+- [builtin-modules](https://github.com/sindresorhus/builtin-modules) - List of the Node.js builtin modules
+
+
+---
+
+
+
+ Get professional support for this package with a Tidelift subscription
+
+
+
+ Tidelift helps make open source sustainable for maintainers while giving companies
assurances about security, maintenance, and licensing for their dependencies.
+
+
diff --git a/tools/node_modules/eslint/node_modules/node-releases/data/processed/envs.json b/tools/node_modules/eslint/node_modules/node-releases/data/processed/envs.json
index b26bcea5db78cf..0db089f3acd77f 100644
--- a/tools/node_modules/eslint/node_modules/node-releases/data/processed/envs.json
+++ b/tools/node_modules/eslint/node_modules/node-releases/data/processed/envs.json
@@ -1 +1 @@
-[{"name":"nodejs","version":"0.2.0","date":"2011-08-26","lts":false,"security":false},{"name":"nodejs","version":"0.3.0","date":"2011-08-26","lts":false,"security":false},{"name":"nodejs","version":"0.4.0","date":"2011-08-26","lts":false,"security":false},{"name":"nodejs","version":"0.5.0","date":"2011-08-26","lts":false,"security":false},{"name":"nodejs","version":"0.6.0","date":"2011-11-04","lts":false,"security":false},{"name":"nodejs","version":"0.7.0","date":"2012-01-17","lts":false,"security":false},{"name":"nodejs","version":"0.8.0","date":"2012-06-22","lts":false,"security":false},{"name":"nodejs","version":"0.9.0","date":"2012-07-20","lts":false,"security":false},{"name":"nodejs","version":"0.10.0","date":"2013-03-11","lts":false,"security":false},{"name":"nodejs","version":"0.11.0","date":"2013-03-28","lts":false,"security":false},{"name":"nodejs","version":"0.12.0","date":"2015-02-06","lts":false,"security":false},{"name":"nodejs","version":"4.0.0","date":"2015-09-08","lts":false,"security":false},{"name":"nodejs","version":"4.1.0","date":"2015-09-17","lts":false,"security":false},{"name":"nodejs","version":"4.2.0","date":"2015-10-12","lts":"Argon","security":false},{"name":"nodejs","version":"4.3.0","date":"2016-02-09","lts":"Argon","security":false},{"name":"nodejs","version":"4.4.0","date":"2016-03-08","lts":"Argon","security":false},{"name":"nodejs","version":"4.5.0","date":"2016-08-16","lts":"Argon","security":false},{"name":"nodejs","version":"4.6.0","date":"2016-09-27","lts":"Argon","security":true},{"name":"nodejs","version":"4.7.0","date":"2016-12-06","lts":"Argon","security":false},{"name":"nodejs","version":"4.8.0","date":"2017-02-21","lts":"Argon","security":false},{"name":"nodejs","version":"4.9.0","date":"2018-03-28","lts":"Argon","security":true},{"name":"nodejs","version":"5.0.0","date":"2015-10-29","lts":false,"security":false},{"name":"nodejs","version":"5.1.0","date":"2015-11-17","lts":false,"security":false},{"name":"nodejs","version"
:"5.2.0","date":"2015-12-09","lts":false,"security":false},{"name":"nodejs","version":"5.3.0","date":"2015-12-15","lts":false,"security":false},{"name":"nodejs","version":"5.4.0","date":"2016-01-06","lts":false,"security":false},{"name":"nodejs","version":"5.5.0","date":"2016-01-21","lts":false,"security":false},{"name":"nodejs","version":"5.6.0","date":"2016-02-09","lts":false,"security":false},{"name":"nodejs","version":"5.7.0","date":"2016-02-23","lts":false,"security":false},{"name":"nodejs","version":"5.8.0","date":"2016-03-09","lts":false,"security":false},{"name":"nodejs","version":"5.9.0","date":"2016-03-16","lts":false,"security":false},{"name":"nodejs","version":"5.10.0","date":"2016-04-01","lts":false,"security":false},{"name":"nodejs","version":"5.11.0","date":"2016-04-21","lts":false,"security":false},{"name":"nodejs","version":"5.12.0","date":"2016-06-23","lts":false,"security":false},{"name":"nodejs","version":"6.0.0","date":"2016-04-26","lts":false,"security":false},{"name":"nodejs","version":"6.1.0","date":"2016-05-05","lts":false,"security":false},{"name":"nodejs","version":"6.2.0","date":"2016-05-17","lts":false,"security":false},{"name":"nodejs","version":"6.3.0","date":"2016-07-06","lts":false,"security":false},{"name":"nodejs","version":"6.4.0","date":"2016-08-12","lts":false,"security":false},{"name":"nodejs","version":"6.5.0","date":"2016-08-26","lts":false,"security":false},{"name":"nodejs","version":"6.6.0","date":"2016-09-14","lts":false,"security":false},{"name":"nodejs","version":"6.7.0","date":"2016-09-27","lts":false,"security":true},{"name":"nodejs","version":"6.8.0","date":"2016-10-12","lts":false,"security":false},{"name":"nodejs","version":"6.9.0","date":"2016-10-18","lts":"Boron","security":false},{"name":"nodejs","version":"6.10.0","date":"2017-02-21","lts":"Boron","security":false},{"name":"nodejs","version":"6.11.0","date":"2017-06-06","lts":"Boron","security":false},{"name":"nodejs","version":"6.12.0","date":"2017-11-06","lts"
:"Boron","security":false},{"name":"nodejs","version":"6.13.0","date":"2018-02-10","lts":"Boron","security":false},{"name":"nodejs","version":"6.14.0","date":"2018-03-28","lts":"Boron","security":true},{"name":"nodejs","version":"6.15.0","date":"2018-11-27","lts":"Boron","security":true},{"name":"nodejs","version":"6.16.0","date":"2018-12-26","lts":"Boron","security":false},{"name":"nodejs","version":"6.17.0","date":"2019-02-28","lts":"Boron","security":true},{"name":"nodejs","version":"7.0.0","date":"2016-10-25","lts":false,"security":false},{"name":"nodejs","version":"7.1.0","date":"2016-11-08","lts":false,"security":false},{"name":"nodejs","version":"7.2.0","date":"2016-11-22","lts":false,"security":false},{"name":"nodejs","version":"7.3.0","date":"2016-12-20","lts":false,"security":false},{"name":"nodejs","version":"7.4.0","date":"2017-01-04","lts":false,"security":false},{"name":"nodejs","version":"7.5.0","date":"2017-01-31","lts":false,"security":false},{"name":"nodejs","version":"7.6.0","date":"2017-02-21","lts":false,"security":false},{"name":"nodejs","version":"7.7.0","date":"2017-02-28","lts":false,"security":false},{"name":"nodejs","version":"7.8.0","date":"2017-03-29","lts":false,"security":false},{"name":"nodejs","version":"7.9.0","date":"2017-04-11","lts":false,"security":false},{"name":"nodejs","version":"7.10.0","date":"2017-05-02","lts":false,"security":false},{"name":"nodejs","version":"8.0.0","date":"2017-05-30","lts":false,"security":false},{"name":"nodejs","version":"8.1.0","date":"2017-06-08","lts":false,"security":false},{"name":"nodejs","version":"8.2.0","date":"2017-07-19","lts":false,"security":false},{"name":"nodejs","version":"8.3.0","date":"2017-08-08","lts":false,"security":false},{"name":"nodejs","version":"8.4.0","date":"2017-08-15","lts":false,"security":false},{"name":"nodejs","version":"8.5.0","date":"2017-09-12","lts":false,"security":false},{"name":"nodejs","version":"8.6.0","date":"2017-09-26","lts":false,"security":false},{"nam
e":"nodejs","version":"8.7.0","date":"2017-10-11","lts":false,"security":false},{"name":"nodejs","version":"8.8.0","date":"2017-10-24","lts":false,"security":false},{"name":"nodejs","version":"8.9.0","date":"2017-10-31","lts":"Carbon","security":false},{"name":"nodejs","version":"8.10.0","date":"2018-03-06","lts":"Carbon","security":false},{"name":"nodejs","version":"8.11.0","date":"2018-03-28","lts":"Carbon","security":true},{"name":"nodejs","version":"8.12.0","date":"2018-09-10","lts":"Carbon","security":false},{"name":"nodejs","version":"8.13.0","date":"2018-11-20","lts":"Carbon","security":false},{"name":"nodejs","version":"8.14.0","date":"2018-11-27","lts":"Carbon","security":true},{"name":"nodejs","version":"8.15.0","date":"2018-12-26","lts":"Carbon","security":false},{"name":"nodejs","version":"8.16.0","date":"2019-04-16","lts":"Carbon","security":false},{"name":"nodejs","version":"8.17.0","date":"2019-12-17","lts":"Carbon","security":true},{"name":"nodejs","version":"9.0.0","date":"2017-10-31","lts":false,"security":false},{"name":"nodejs","version":"9.1.0","date":"2017-11-07","lts":false,"security":false},{"name":"nodejs","version":"9.2.0","date":"2017-11-14","lts":false,"security":false},{"name":"nodejs","version":"9.3.0","date":"2017-12-12","lts":false,"security":false},{"name":"nodejs","version":"9.4.0","date":"2018-01-10","lts":false,"security":false},{"name":"nodejs","version":"9.5.0","date":"2018-01-31","lts":false,"security":false},{"name":"nodejs","version":"9.6.0","date":"2018-02-21","lts":false,"security":false},{"name":"nodejs","version":"9.7.0","date":"2018-03-01","lts":false,"security":false},{"name":"nodejs","version":"9.8.0","date":"2018-03-07","lts":false,"security":false},{"name":"nodejs","version":"9.9.0","date":"2018-03-21","lts":false,"security":false},{"name":"nodejs","version":"9.10.0","date":"2018-03-28","lts":false,"security":true},{"name":"nodejs","version":"9.11.0","date":"2018-04-04","lts":false,"security":false},{"name":"nodejs",
"version":"10.0.0","date":"2018-04-24","lts":false,"security":false},{"name":"nodejs","version":"10.1.0","date":"2018-05-08","lts":false,"security":false},{"name":"nodejs","version":"10.2.0","date":"2018-05-23","lts":false,"security":false},{"name":"nodejs","version":"10.3.0","date":"2018-05-29","lts":false,"security":false},{"name":"nodejs","version":"10.4.0","date":"2018-06-06","lts":false,"security":false},{"name":"nodejs","version":"10.5.0","date":"2018-06-20","lts":false,"security":false},{"name":"nodejs","version":"10.6.0","date":"2018-07-04","lts":false,"security":false},{"name":"nodejs","version":"10.7.0","date":"2018-07-18","lts":false,"security":false},{"name":"nodejs","version":"10.8.0","date":"2018-08-01","lts":false,"security":false},{"name":"nodejs","version":"10.9.0","date":"2018-08-15","lts":false,"security":false},{"name":"nodejs","version":"10.10.0","date":"2018-09-06","lts":false,"security":false},{"name":"nodejs","version":"10.11.0","date":"2018-09-19","lts":false,"security":false},{"name":"nodejs","version":"10.12.0","date":"2018-10-10","lts":false,"security":false},{"name":"nodejs","version":"10.13.0","date":"2018-10-30","lts":"Dubnium","security":false},{"name":"nodejs","version":"10.14.0","date":"2018-11-27","lts":"Dubnium","security":true},{"name":"nodejs","version":"10.15.0","date":"2018-12-26","lts":"Dubnium","security":false},{"name":"nodejs","version":"10.16.0","date":"2019-05-28","lts":"Dubnium","security":false},{"name":"nodejs","version":"10.17.0","date":"2019-10-22","lts":"Dubnium","security":false},{"name":"nodejs","version":"10.18.0","date":"2019-12-17","lts":"Dubnium","security":true},{"name":"nodejs","version":"10.19.0","date":"2020-02-05","lts":"Dubnium","security":true},{"name":"nodejs","version":"10.20.0","date":"2020-03-26","lts":"Dubnium","security":false},{"name":"nodejs","version":"10.21.0","date":"2020-06-02","lts":"Dubnium","security":true},{"name":"nodejs","version":"10.22.0","date":"2020-07-21","lts":"Dubnium","securit
y":false},{"name":"nodejs","version":"10.23.0","date":"2020-10-27","lts":"Dubnium","security":false},{"name":"nodejs","version":"10.24.0","date":"2021-02-23","lts":"Dubnium","security":true},{"name":"nodejs","version":"11.0.0","date":"2018-10-23","lts":false,"security":false},{"name":"nodejs","version":"11.1.0","date":"2018-10-30","lts":false,"security":false},{"name":"nodejs","version":"11.2.0","date":"2018-11-15","lts":false,"security":false},{"name":"nodejs","version":"11.3.0","date":"2018-11-27","lts":false,"security":true},{"name":"nodejs","version":"11.4.0","date":"2018-12-07","lts":false,"security":false},{"name":"nodejs","version":"11.5.0","date":"2018-12-18","lts":false,"security":false},{"name":"nodejs","version":"11.6.0","date":"2018-12-26","lts":false,"security":false},{"name":"nodejs","version":"11.7.0","date":"2019-01-17","lts":false,"security":false},{"name":"nodejs","version":"11.8.0","date":"2019-01-24","lts":false,"security":false},{"name":"nodejs","version":"11.9.0","date":"2019-01-30","lts":false,"security":false},{"name":"nodejs","version":"11.10.0","date":"2019-02-14","lts":false,"security":false},{"name":"nodejs","version":"11.11.0","date":"2019-03-05","lts":false,"security":false},{"name":"nodejs","version":"11.12.0","date":"2019-03-14","lts":false,"security":false},{"name":"nodejs","version":"11.13.0","date":"2019-03-28","lts":false,"security":false},{"name":"nodejs","version":"11.14.0","date":"2019-04-10","lts":false,"security":false},{"name":"nodejs","version":"11.15.0","date":"2019-04-30","lts":false,"security":false},{"name":"nodejs","version":"12.0.0","date":"2019-04-23","lts":false,"security":false},{"name":"nodejs","version":"12.1.0","date":"2019-04-29","lts":false,"security":false},{"name":"nodejs","version":"12.2.0","date":"2019-05-07","lts":false,"security":false},{"name":"nodejs","version":"12.3.0","date":"2019-05-21","lts":false,"security":false},{"name":"nodejs","version":"12.4.0","date":"2019-06-04","lts":false,"security":false
},{"name":"nodejs","version":"12.5.0","date":"2019-06-26","lts":false,"security":false},{"name":"nodejs","version":"12.6.0","date":"2019-07-03","lts":false,"security":false},{"name":"nodejs","version":"12.7.0","date":"2019-07-23","lts":false,"security":false},{"name":"nodejs","version":"12.8.0","date":"2019-08-06","lts":false,"security":false},{"name":"nodejs","version":"12.9.0","date":"2019-08-20","lts":false,"security":false},{"name":"nodejs","version":"12.10.0","date":"2019-09-04","lts":false,"security":false},{"name":"nodejs","version":"12.11.0","date":"2019-09-25","lts":false,"security":false},{"name":"nodejs","version":"12.12.0","date":"2019-10-11","lts":false,"security":false},{"name":"nodejs","version":"12.13.0","date":"2019-10-21","lts":"Erbium","security":false},{"name":"nodejs","version":"12.14.0","date":"2019-12-17","lts":"Erbium","security":true},{"name":"nodejs","version":"12.15.0","date":"2020-02-05","lts":"Erbium","security":true},{"name":"nodejs","version":"12.16.0","date":"2020-02-11","lts":"Erbium","security":false},{"name":"nodejs","version":"12.17.0","date":"2020-05-26","lts":"Erbium","security":false},{"name":"nodejs","version":"12.18.0","date":"2020-06-02","lts":"Erbium","security":true},{"name":"nodejs","version":"12.19.0","date":"2020-10-06","lts":"Erbium","security":false},{"name":"nodejs","version":"12.20.0","date":"2020-11-24","lts":"Erbium","security":false},{"name":"nodejs","version":"12.21.0","date":"2021-02-23","lts":"Erbium","security":true},{"name":"nodejs","version":"12.22.0","date":"2021-03-30","lts":"Erbium","security":false},{"name":"nodejs","version":"13.0.0","date":"2019-10-22","lts":false,"security":false},{"name":"nodejs","version":"13.1.0","date":"2019-11-05","lts":false,"security":false},{"name":"nodejs","version":"13.2.0","date":"2019-11-21","lts":false,"security":false},{"name":"nodejs","version":"13.3.0","date":"2019-12-03","lts":false,"security":false},{"name":"nodejs","version":"13.4.0","date":"2019-12-17","lts":false
,"security":true},{"name":"nodejs","version":"13.5.0","date":"2019-12-18","lts":false,"security":false},{"name":"nodejs","version":"13.6.0","date":"2020-01-07","lts":false,"security":false},{"name":"nodejs","version":"13.7.0","date":"2020-01-21","lts":false,"security":false},{"name":"nodejs","version":"13.8.0","date":"2020-02-05","lts":false,"security":true},{"name":"nodejs","version":"13.9.0","date":"2020-02-18","lts":false,"security":false},{"name":"nodejs","version":"13.10.0","date":"2020-03-04","lts":false,"security":false},{"name":"nodejs","version":"13.11.0","date":"2020-03-12","lts":false,"security":false},{"name":"nodejs","version":"13.12.0","date":"2020-03-26","lts":false,"security":false},{"name":"nodejs","version":"13.13.0","date":"2020-04-14","lts":false,"security":false},{"name":"nodejs","version":"13.14.0","date":"2020-04-29","lts":false,"security":false},{"name":"nodejs","version":"14.0.0","date":"2020-04-21","lts":false,"security":false},{"name":"nodejs","version":"14.1.0","date":"2020-04-29","lts":false,"security":false},{"name":"nodejs","version":"14.2.0","date":"2020-05-05","lts":false,"security":false},{"name":"nodejs","version":"14.3.0","date":"2020-05-19","lts":false,"security":false},{"name":"nodejs","version":"14.4.0","date":"2020-06-02","lts":false,"security":true},{"name":"nodejs","version":"14.5.0","date":"2020-06-30","lts":false,"security":false},{"name":"nodejs","version":"14.6.0","date":"2020-07-20","lts":false,"security":false},{"name":"nodejs","version":"14.7.0","date":"2020-07-29","lts":false,"security":false},{"name":"nodejs","version":"14.8.0","date":"2020-08-11","lts":false,"security":false},{"name":"nodejs","version":"14.9.0","date":"2020-08-27","lts":false,"security":false},{"name":"nodejs","version":"14.10.0","date":"2020-09-08","lts":false,"security":false},{"name":"nodejs","version":"14.11.0","date":"2020-09-15","lts":false,"security":true},{"name":"nodejs","version":"14.12.0","date":"2020-09-22","lts":false,"security":false}
,{"name":"nodejs","version":"14.13.0","date":"2020-09-29","lts":false,"security":false},{"name":"nodejs","version":"14.14.0","date":"2020-10-15","lts":false,"security":false},{"name":"nodejs","version":"14.15.0","date":"2020-10-27","lts":"Fermium","security":false},{"name":"nodejs","version":"14.16.0","date":"2021-02-23","lts":"Fermium","security":true},{"name":"nodejs","version":"14.17.0","date":"2021-05-11","lts":"Fermium","security":false},{"name":"nodejs","version":"14.18.0","date":"2021-09-28","lts":"Fermium","security":false},{"name":"nodejs","version":"14.19.0","date":"2022-02-01","lts":"Fermium","security":false},{"name":"nodejs","version":"14.20.0","date":"2022-07-07","lts":"Fermium","security":true},{"name":"nodejs","version":"14.21.0","date":"2022-11-01","lts":"Fermium","security":false},{"name":"nodejs","version":"15.0.0","date":"2020-10-20","lts":false,"security":false},{"name":"nodejs","version":"15.1.0","date":"2020-11-04","lts":false,"security":false},{"name":"nodejs","version":"15.2.0","date":"2020-11-10","lts":false,"security":false},{"name":"nodejs","version":"15.3.0","date":"2020-11-24","lts":false,"security":false},{"name":"nodejs","version":"15.4.0","date":"2020-12-09","lts":false,"security":false},{"name":"nodejs","version":"15.5.0","date":"2020-12-22","lts":false,"security":false},{"name":"nodejs","version":"15.6.0","date":"2021-01-14","lts":false,"security":false},{"name":"nodejs","version":"15.7.0","date":"2021-01-25","lts":false,"security":false},{"name":"nodejs","version":"15.8.0","date":"2021-02-02","lts":false,"security":false},{"name":"nodejs","version":"15.9.0","date":"2021-02-18","lts":false,"security":false},{"name":"nodejs","version":"15.10.0","date":"2021-02-23","lts":false,"security":true},{"name":"nodejs","version":"15.11.0","date":"2021-03-03","lts":false,"security":false},{"name":"nodejs","version":"15.12.0","date":"2021-03-17","lts":false,"security":false},{"name":"nodejs","version":"15.13.0","date":"2021-03-31","lts":false,"
security":false},{"name":"nodejs","version":"15.14.0","date":"2021-04-06","lts":false,"security":false},{"name":"nodejs","version":"16.0.0","date":"2021-04-20","lts":false,"security":false},{"name":"nodejs","version":"16.1.0","date":"2021-05-04","lts":false,"security":false},{"name":"nodejs","version":"16.2.0","date":"2021-05-19","lts":false,"security":false},{"name":"nodejs","version":"16.3.0","date":"2021-06-03","lts":false,"security":false},{"name":"nodejs","version":"16.4.0","date":"2021-06-23","lts":false,"security":false},{"name":"nodejs","version":"16.5.0","date":"2021-07-14","lts":false,"security":false},{"name":"nodejs","version":"16.6.0","date":"2021-07-29","lts":false,"security":true},{"name":"nodejs","version":"16.7.0","date":"2021-08-18","lts":false,"security":false},{"name":"nodejs","version":"16.8.0","date":"2021-08-25","lts":false,"security":false},{"name":"nodejs","version":"16.9.0","date":"2021-09-07","lts":false,"security":false},{"name":"nodejs","version":"16.10.0","date":"2021-09-22","lts":false,"security":false},{"name":"nodejs","version":"16.11.0","date":"2021-10-08","lts":false,"security":false},{"name":"nodejs","version":"16.12.0","date":"2021-10-20","lts":false,"security":false},{"name":"nodejs","version":"16.13.0","date":"2021-10-26","lts":"Gallium","security":false},{"name":"nodejs","version":"16.14.0","date":"2022-02-08","lts":"Gallium","security":false},{"name":"nodejs","version":"16.15.0","date":"2022-04-26","lts":"Gallium","security":false},{"name":"nodejs","version":"16.16.0","date":"2022-07-07","lts":"Gallium","security":true},{"name":"nodejs","version":"16.17.0","date":"2022-08-16","lts":"Gallium","security":false},{"name":"nodejs","version":"16.18.0","date":"2022-10-12","lts":"Gallium","security":false},{"name":"nodejs","version":"16.19.0","date":"2022-12-13","lts":"Gallium","security":false},{"name":"nodejs","version":"16.20.0","date":"2023-03-28","lts":"Gallium","security":false},{"name":"nodejs","version":"17.0.0","date":"2021-
10-19","lts":false,"security":false},{"name":"nodejs","version":"17.1.0","date":"2021-11-09","lts":false,"security":false},{"name":"nodejs","version":"17.2.0","date":"2021-11-30","lts":false,"security":false},{"name":"nodejs","version":"17.3.0","date":"2021-12-17","lts":false,"security":false},{"name":"nodejs","version":"17.4.0","date":"2022-01-18","lts":false,"security":false},{"name":"nodejs","version":"17.5.0","date":"2022-02-10","lts":false,"security":false},{"name":"nodejs","version":"17.6.0","date":"2022-02-22","lts":false,"security":false},{"name":"nodejs","version":"17.7.0","date":"2022-03-09","lts":false,"security":false},{"name":"nodejs","version":"17.8.0","date":"2022-03-22","lts":false,"security":false},{"name":"nodejs","version":"17.9.0","date":"2022-04-07","lts":false,"security":false},{"name":"nodejs","version":"18.0.0","date":"2022-04-18","lts":false,"security":false},{"name":"nodejs","version":"18.1.0","date":"2022-05-03","lts":false,"security":false},{"name":"nodejs","version":"18.2.0","date":"2022-05-17","lts":false,"security":false},{"name":"nodejs","version":"18.3.0","date":"2022-06-02","lts":false,"security":false},{"name":"nodejs","version":"18.4.0","date":"2022-06-16","lts":false,"security":false},{"name":"nodejs","version":"18.5.0","date":"2022-07-06","lts":false,"security":true},{"name":"nodejs","version":"18.6.0","date":"2022-07-13","lts":false,"security":false},{"name":"nodejs","version":"18.7.0","date":"2022-07-26","lts":false,"security":false},{"name":"nodejs","version":"18.8.0","date":"2022-08-24","lts":false,"security":false},{"name":"nodejs","version":"18.9.0","date":"2022-09-07","lts":false,"security":false},{"name":"nodejs","version":"18.10.0","date":"2022-09-28","lts":false,"security":false},{"name":"nodejs","version":"18.11.0","date":"2022-10-13","lts":false,"security":false},{"name":"nodejs","version":"18.12.0","date":"2022-10-25","lts":"Hydrogen","security":false},{"name":"nodejs","version":"18.13.0","date":"2023-01-05","lts":"
Hydrogen","security":false},{"name":"nodejs","version":"18.14.0","date":"2023-02-01","lts":"Hydrogen","security":false},{"name":"nodejs","version":"18.15.0","date":"2023-03-05","lts":"Hydrogen","security":false},{"name":"nodejs","version":"18.16.0","date":"2023-04-12","lts":"Hydrogen","security":false},{"name":"nodejs","version":"19.0.0","date":"2022-10-17","lts":false,"security":false},{"name":"nodejs","version":"19.1.0","date":"2022-11-14","lts":false,"security":false},{"name":"nodejs","version":"19.2.0","date":"2022-11-29","lts":false,"security":false},{"name":"nodejs","version":"19.3.0","date":"2022-12-14","lts":false,"security":false},{"name":"nodejs","version":"19.4.0","date":"2023-01-05","lts":false,"security":false},{"name":"nodejs","version":"19.5.0","date":"2023-01-24","lts":false,"security":false},{"name":"nodejs","version":"19.6.0","date":"2023-02-01","lts":false,"security":false},{"name":"nodejs","version":"19.7.0","date":"2023-02-21","lts":false,"security":false},{"name":"nodejs","version":"19.8.0","date":"2023-03-14","lts":false,"security":false},{"name":"nodejs","version":"19.9.0","date":"2023-04-10","lts":false,"security":false},{"name":"nodejs","version":"20.0.0","date":"2023-04-17","lts":false,"security":false},{"name":"nodejs","version":"20.1.0","date":"2023-05-03","lts":false,"security":false},{"name":"nodejs","version":"20.2.0","date":"2023-05-16","lts":false,"security":false}]
\ No newline at end of file
+[{"name":"nodejs","version":"0.2.0","date":"2011-08-26","lts":false,"security":false,"v8":"2.3.8.0"},{"name":"nodejs","version":"0.3.0","date":"2011-08-26","lts":false,"security":false,"v8":"2.5.1.0"},{"name":"nodejs","version":"0.4.0","date":"2011-08-26","lts":false,"security":false,"v8":"3.1.2.0"},{"name":"nodejs","version":"0.5.0","date":"2011-08-26","lts":false,"security":false,"v8":"3.1.8.25"},{"name":"nodejs","version":"0.6.0","date":"2011-11-04","lts":false,"security":false,"v8":"3.6.6.6"},{"name":"nodejs","version":"0.7.0","date":"2012-01-17","lts":false,"security":false,"v8":"3.8.6.0"},{"name":"nodejs","version":"0.8.0","date":"2012-06-22","lts":false,"security":false,"v8":"3.11.10.10"},{"name":"nodejs","version":"0.9.0","date":"2012-07-20","lts":false,"security":false,"v8":"3.11.10.15"},{"name":"nodejs","version":"0.10.0","date":"2013-03-11","lts":false,"security":false,"v8":"3.14.5.8"},{"name":"nodejs","version":"0.11.0","date":"2013-03-28","lts":false,"security":false,"v8":"3.17.13.0"},{"name":"nodejs","version":"0.12.0","date":"2015-02-06","lts":false,"security":false,"v8":"3.28.73.0"},{"name":"nodejs","version":"4.0.0","date":"2015-09-08","lts":false,"security":false,"v8":"4.5.103.30"},{"name":"nodejs","version":"4.1.0","date":"2015-09-17","lts":false,"security":false,"v8":"4.5.103.33"},{"name":"nodejs","version":"4.2.0","date":"2015-10-12","lts":"Argon","security":false,"v8":"4.5.103.35"},{"name":"nodejs","version":"4.3.0","date":"2016-02-09","lts":"Argon","security":false,"v8":"4.5.103.35"},{"name":"nodejs","version":"4.4.0","date":"2016-03-08","lts":"Argon","security":false,"v8":"4.5.103.35"},{"name":"nodejs","version":"4.5.0","date":"2016-08-16","lts":"Argon","security":false,"v8":"4.5.103.37"},{"name":"nodejs","version":"4.6.0","date":"2016-09-27","lts":"Argon","security":true,"v8":"4.5.103.37"},{"name":"nodejs","version":"4.7.0","date":"2016-12-06","lts":"Argon","security":false,"v8":"4.5.103.43"},{"name":"nodejs","version":"4.8.0","date":"2017-
02-21","lts":"Argon","security":false,"v8":"4.5.103.45"},{"name":"nodejs","version":"4.9.0","date":"2018-03-28","lts":"Argon","security":true,"v8":"4.5.103.53"},{"name":"nodejs","version":"5.0.0","date":"2015-10-29","lts":false,"security":false,"v8":"4.6.85.28"},{"name":"nodejs","version":"5.1.0","date":"2015-11-17","lts":false,"security":false,"v8":"4.6.85.31"},{"name":"nodejs","version":"5.2.0","date":"2015-12-09","lts":false,"security":false,"v8":"4.6.85.31"},{"name":"nodejs","version":"5.3.0","date":"2015-12-15","lts":false,"security":false,"v8":"4.6.85.31"},{"name":"nodejs","version":"5.4.0","date":"2016-01-06","lts":false,"security":false,"v8":"4.6.85.31"},{"name":"nodejs","version":"5.5.0","date":"2016-01-21","lts":false,"security":false,"v8":"4.6.85.31"},{"name":"nodejs","version":"5.6.0","date":"2016-02-09","lts":false,"security":false,"v8":"4.6.85.31"},{"name":"nodejs","version":"5.7.0","date":"2016-02-23","lts":false,"security":false,"v8":"4.6.85.31"},{"name":"nodejs","version":"5.8.0","date":"2016-03-09","lts":false,"security":false,"v8":"4.6.85.31"},{"name":"nodejs","version":"5.9.0","date":"2016-03-16","lts":false,"security":false,"v8":"4.6.85.31"},{"name":"nodejs","version":"5.10.0","date":"2016-04-01","lts":false,"security":false,"v8":"4.6.85.31"},{"name":"nodejs","version":"5.11.0","date":"2016-04-21","lts":false,"security":false,"v8":"4.6.85.31"},{"name":"nodejs","version":"5.12.0","date":"2016-06-23","lts":false,"security":false,"v8":"4.6.85.32"},{"name":"nodejs","version":"6.0.0","date":"2016-04-26","lts":false,"security":false,"v8":"5.0.71.35"},{"name":"nodejs","version":"6.1.0","date":"2016-05-05","lts":false,"security":false,"v8":"5.0.71.35"},{"name":"nodejs","version":"6.2.0","date":"2016-05-17","lts":false,"security":false,"v8":"5.0.71.47"},{"name":"nodejs","version":"6.3.0","date":"2016-07-06","lts":false,"security":false,"v8":"5.0.71.52"},{"name":"nodejs","version":"6.4.0","date":"2016-08-12","lts":false,"security":false,"v8":"5.0.71.60"},
{"name":"nodejs","version":"6.5.0","date":"2016-08-26","lts":false,"security":false,"v8":"5.1.281.81"},{"name":"nodejs","version":"6.6.0","date":"2016-09-14","lts":false,"security":false,"v8":"5.1.281.83"},{"name":"nodejs","version":"6.7.0","date":"2016-09-27","lts":false,"security":true,"v8":"5.1.281.83"},{"name":"nodejs","version":"6.8.0","date":"2016-10-12","lts":false,"security":false,"v8":"5.1.281.84"},{"name":"nodejs","version":"6.9.0","date":"2016-10-18","lts":"Boron","security":false,"v8":"5.1.281.84"},{"name":"nodejs","version":"6.10.0","date":"2017-02-21","lts":"Boron","security":false,"v8":"5.1.281.93"},{"name":"nodejs","version":"6.11.0","date":"2017-06-06","lts":"Boron","security":false,"v8":"5.1.281.102"},{"name":"nodejs","version":"6.12.0","date":"2017-11-06","lts":"Boron","security":false,"v8":"5.1.281.108"},{"name":"nodejs","version":"6.13.0","date":"2018-02-10","lts":"Boron","security":false,"v8":"5.1.281.111"},{"name":"nodejs","version":"6.14.0","date":"2018-03-28","lts":"Boron","security":true,"v8":"5.1.281.111"},{"name":"nodejs","version":"6.15.0","date":"2018-11-27","lts":"Boron","security":true,"v8":"5.1.281.111"},{"name":"nodejs","version":"6.16.0","date":"2018-12-26","lts":"Boron","security":false,"v8":"5.1.281.111"},{"name":"nodejs","version":"6.17.0","date":"2019-02-28","lts":"Boron","security":true,"v8":"5.1.281.111"},{"name":"nodejs","version":"7.0.0","date":"2016-10-25","lts":false,"security":false,"v8":"5.4.500.36"},{"name":"nodejs","version":"7.1.0","date":"2016-11-08","lts":false,"security":false,"v8":"5.4.500.36"},{"name":"nodejs","version":"7.2.0","date":"2016-11-22","lts":false,"security":false,"v8":"5.4.500.43"},{"name":"nodejs","version":"7.3.0","date":"2016-12-20","lts":false,"security":false,"v8":"5.4.500.45"},{"name":"nodejs","version":"7.4.0","date":"2017-01-04","lts":false,"security":false,"v8":"5.4.500.45"},{"name":"nodejs","version":"7.5.0","date":"2017-01-31","lts":false,"security":false,"v8":"5.4.500.48"},{"name":"nodej
s","version":"7.6.0","date":"2017-02-21","lts":false,"security":false,"v8":"5.5.372.40"},{"name":"nodejs","version":"7.7.0","date":"2017-02-28","lts":false,"security":false,"v8":"5.5.372.41"},{"name":"nodejs","version":"7.8.0","date":"2017-03-29","lts":false,"security":false,"v8":"5.5.372.43"},{"name":"nodejs","version":"7.9.0","date":"2017-04-11","lts":false,"security":false,"v8":"5.5.372.43"},{"name":"nodejs","version":"7.10.0","date":"2017-05-02","lts":false,"security":false,"v8":"5.5.372.43"},{"name":"nodejs","version":"8.0.0","date":"2017-05-30","lts":false,"security":false,"v8":"5.8.283.41"},{"name":"nodejs","version":"8.1.0","date":"2017-06-08","lts":false,"security":false,"v8":"5.8.283.41"},{"name":"nodejs","version":"8.2.0","date":"2017-07-19","lts":false,"security":false,"v8":"5.8.283.41"},{"name":"nodejs","version":"8.3.0","date":"2017-08-08","lts":false,"security":false,"v8":"6.0.286.52"},{"name":"nodejs","version":"8.4.0","date":"2017-08-15","lts":false,"security":false,"v8":"6.0.286.52"},{"name":"nodejs","version":"8.5.0","date":"2017-09-12","lts":false,"security":false,"v8":"6.0.287.53"},{"name":"nodejs","version":"8.6.0","date":"2017-09-26","lts":false,"security":false,"v8":"6.0.287.53"},{"name":"nodejs","version":"8.7.0","date":"2017-10-11","lts":false,"security":false,"v8":"6.1.534.42"},{"name":"nodejs","version":"8.8.0","date":"2017-10-24","lts":false,"security":false,"v8":"6.1.534.42"},{"name":"nodejs","version":"8.9.0","date":"2017-10-31","lts":"Carbon","security":false,"v8":"6.1.534.46"},{"name":"nodejs","version":"8.10.0","date":"2018-03-06","lts":"Carbon","security":false,"v8":"6.2.414.50"},{"name":"nodejs","version":"8.11.0","date":"2018-03-28","lts":"Carbon","security":true,"v8":"6.2.414.50"},{"name":"nodejs","version":"8.12.0","date":"2018-09-10","lts":"Carbon","security":false,"v8":"6.2.414.66"},{"name":"nodejs","version":"8.13.0","date":"2018-11-20","lts":"Carbon","security":false,"v8":"6.2.414.72"},{"name":"nodejs","version":"8.14.0","d
ate":"2018-11-27","lts":"Carbon","security":true,"v8":"6.2.414.72"},{"name":"nodejs","version":"8.15.0","date":"2018-12-26","lts":"Carbon","security":false,"v8":"6.2.414.75"},{"name":"nodejs","version":"8.16.0","date":"2019-04-16","lts":"Carbon","security":false,"v8":"6.2.414.77"},{"name":"nodejs","version":"8.17.0","date":"2019-12-17","lts":"Carbon","security":true,"v8":"6.2.414.78"},{"name":"nodejs","version":"9.0.0","date":"2017-10-31","lts":false,"security":false,"v8":"6.2.414.32"},{"name":"nodejs","version":"9.1.0","date":"2017-11-07","lts":false,"security":false,"v8":"6.2.414.32"},{"name":"nodejs","version":"9.2.0","date":"2017-11-14","lts":false,"security":false,"v8":"6.2.414.44"},{"name":"nodejs","version":"9.3.0","date":"2017-12-12","lts":false,"security":false,"v8":"6.2.414.46"},{"name":"nodejs","version":"9.4.0","date":"2018-01-10","lts":false,"security":false,"v8":"6.2.414.46"},{"name":"nodejs","version":"9.5.0","date":"2018-01-31","lts":false,"security":false,"v8":"6.2.414.46"},{"name":"nodejs","version":"9.6.0","date":"2018-02-21","lts":false,"security":false,"v8":"6.2.414.46"},{"name":"nodejs","version":"9.7.0","date":"2018-03-01","lts":false,"security":false,"v8":"6.2.414.46"},{"name":"nodejs","version":"9.8.0","date":"2018-03-07","lts":false,"security":false,"v8":"6.2.414.46"},{"name":"nodejs","version":"9.9.0","date":"2018-03-21","lts":false,"security":false,"v8":"6.2.414.46"},{"name":"nodejs","version":"9.10.0","date":"2018-03-28","lts":false,"security":true,"v8":"6.2.414.46"},{"name":"nodejs","version":"9.11.0","date":"2018-04-04","lts":false,"security":false,"v8":"6.2.414.46"},{"name":"nodejs","version":"10.0.0","date":"2018-04-24","lts":false,"security":false,"v8":"6.6.346.24"},{"name":"nodejs","version":"10.1.0","date":"2018-05-08","lts":false,"security":false,"v8":"6.6.346.27"},{"name":"nodejs","version":"10.2.0","date":"2018-05-23","lts":false,"security":false,"v8":"6.6.346.32"},{"name":"nodejs","version":"10.3.0","date":"2018-05-29","lts":f
alse,"security":false,"v8":"6.6.346.32"},{"name":"nodejs","version":"10.4.0","date":"2018-06-06","lts":false,"security":false,"v8":"6.7.288.43"},{"name":"nodejs","version":"10.5.0","date":"2018-06-20","lts":false,"security":false,"v8":"6.7.288.46"},{"name":"nodejs","version":"10.6.0","date":"2018-07-04","lts":false,"security":false,"v8":"6.7.288.46"},{"name":"nodejs","version":"10.7.0","date":"2018-07-18","lts":false,"security":false,"v8":"6.7.288.49"},{"name":"nodejs","version":"10.8.0","date":"2018-08-01","lts":false,"security":false,"v8":"6.7.288.49"},{"name":"nodejs","version":"10.9.0","date":"2018-08-15","lts":false,"security":false,"v8":"6.8.275.24"},{"name":"nodejs","version":"10.10.0","date":"2018-09-06","lts":false,"security":false,"v8":"6.8.275.30"},{"name":"nodejs","version":"10.11.0","date":"2018-09-19","lts":false,"security":false,"v8":"6.8.275.32"},{"name":"nodejs","version":"10.12.0","date":"2018-10-10","lts":false,"security":false,"v8":"6.8.275.32"},{"name":"nodejs","version":"10.13.0","date":"2018-10-30","lts":"Dubnium","security":false,"v8":"6.8.275.32"},{"name":"nodejs","version":"10.14.0","date":"2018-11-27","lts":"Dubnium","security":true,"v8":"6.8.275.32"},{"name":"nodejs","version":"10.15.0","date":"2018-12-26","lts":"Dubnium","security":false,"v8":"6.8.275.32"},{"name":"nodejs","version":"10.16.0","date":"2019-05-28","lts":"Dubnium","security":false,"v8":"6.8.275.32"},{"name":"nodejs","version":"10.17.0","date":"2019-10-22","lts":"Dubnium","security":false,"v8":"6.8.275.32"},{"name":"nodejs","version":"10.18.0","date":"2019-12-17","lts":"Dubnium","security":true,"v8":"6.8.275.32"},{"name":"nodejs","version":"10.19.0","date":"2020-02-05","lts":"Dubnium","security":true,"v8":"6.8.275.32"},{"name":"nodejs","version":"10.20.0","date":"2020-03-26","lts":"Dubnium","security":false,"v8":"6.8.275.32"},{"name":"nodejs","version":"10.21.0","date":"2020-06-02","lts":"Dubnium","security":true,"v8":"6.8.275.32"},{"name":"nodejs","version":"10.22.0","date"
:"2020-07-21","lts":"Dubnium","security":false,"v8":"6.8.275.32"},{"name":"nodejs","version":"10.23.0","date":"2020-10-27","lts":"Dubnium","security":false,"v8":"6.8.275.32"},{"name":"nodejs","version":"10.24.0","date":"2021-02-23","lts":"Dubnium","security":true,"v8":"6.8.275.32"},{"name":"nodejs","version":"11.0.0","date":"2018-10-23","lts":false,"security":false,"v8":"7.0.276.28"},{"name":"nodejs","version":"11.1.0","date":"2018-10-30","lts":false,"security":false,"v8":"7.0.276.32"},{"name":"nodejs","version":"11.2.0","date":"2018-11-15","lts":false,"security":false,"v8":"7.0.276.38"},{"name":"nodejs","version":"11.3.0","date":"2018-11-27","lts":false,"security":true,"v8":"7.0.276.38"},{"name":"nodejs","version":"11.4.0","date":"2018-12-07","lts":false,"security":false,"v8":"7.0.276.38"},{"name":"nodejs","version":"11.5.0","date":"2018-12-18","lts":false,"security":false,"v8":"7.0.276.38"},{"name":"nodejs","version":"11.6.0","date":"2018-12-26","lts":false,"security":false,"v8":"7.0.276.38"},{"name":"nodejs","version":"11.7.0","date":"2019-01-17","lts":false,"security":false,"v8":"7.0.276.38"},{"name":"nodejs","version":"11.8.0","date":"2019-01-24","lts":false,"security":false,"v8":"7.0.276.38"},{"name":"nodejs","version":"11.9.0","date":"2019-01-30","lts":false,"security":false,"v8":"7.0.276.38"},{"name":"nodejs","version":"11.10.0","date":"2019-02-14","lts":false,"security":false,"v8":"7.0.276.38"},{"name":"nodejs","version":"11.11.0","date":"2019-03-05","lts":false,"security":false,"v8":"7.0.276.38"},{"name":"nodejs","version":"11.12.0","date":"2019-03-14","lts":false,"security":false,"v8":"7.0.276.38"},{"name":"nodejs","version":"11.13.0","date":"2019-03-28","lts":false,"security":false,"v8":"7.0.276.38"},{"name":"nodejs","version":"11.14.0","date":"2019-04-10","lts":false,"security":false,"v8":"7.0.276.38"},{"name":"nodejs","version":"11.15.0","date":"2019-04-30","lts":false,"security":false,"v8":"7.0.276.38"},{"name":"nodejs","version":"12.0.0","date":"2019
-04-23","lts":false,"security":false,"v8":"7.4.288.21"},{"name":"nodejs","version":"12.1.0","date":"2019-04-29","lts":false,"security":false,"v8":"7.4.288.21"},{"name":"nodejs","version":"12.2.0","date":"2019-05-07","lts":false,"security":false,"v8":"7.4.288.21"},{"name":"nodejs","version":"12.3.0","date":"2019-05-21","lts":false,"security":false,"v8":"7.4.288.27"},{"name":"nodejs","version":"12.4.0","date":"2019-06-04","lts":false,"security":false,"v8":"7.4.288.27"},{"name":"nodejs","version":"12.5.0","date":"2019-06-26","lts":false,"security":false,"v8":"7.5.288.22"},{"name":"nodejs","version":"12.6.0","date":"2019-07-03","lts":false,"security":false,"v8":"7.5.288.22"},{"name":"nodejs","version":"12.7.0","date":"2019-07-23","lts":false,"security":false,"v8":"7.5.288.22"},{"name":"nodejs","version":"12.8.0","date":"2019-08-06","lts":false,"security":false,"v8":"7.5.288.22"},{"name":"nodejs","version":"12.9.0","date":"2019-08-20","lts":false,"security":false,"v8":"7.6.303.29"},{"name":"nodejs","version":"12.10.0","date":"2019-09-04","lts":false,"security":false,"v8":"7.6.303.29"},{"name":"nodejs","version":"12.11.0","date":"2019-09-25","lts":false,"security":false,"v8":"7.7.299.11"},{"name":"nodejs","version":"12.12.0","date":"2019-10-11","lts":false,"security":false,"v8":"7.7.299.13"},{"name":"nodejs","version":"12.13.0","date":"2019-10-21","lts":"Erbium","security":false,"v8":"7.7.299.13"},{"name":"nodejs","version":"12.14.0","date":"2019-12-17","lts":"Erbium","security":true,"v8":"7.7.299.13"},{"name":"nodejs","version":"12.15.0","date":"2020-02-05","lts":"Erbium","security":true,"v8":"7.7.299.13"},{"name":"nodejs","version":"12.16.0","date":"2020-02-11","lts":"Erbium","security":false,"v8":"7.8.279.23"},{"name":"nodejs","version":"12.17.0","date":"2020-05-26","lts":"Erbium","security":false,"v8":"7.8.279.23"},{"name":"nodejs","version":"12.18.0","date":"2020-06-02","lts":"Erbium","security":true,"v8":"7.8.279.23"},{"name":"nodejs","version":"12.19.0","date":"202
0-10-06","lts":"Erbium","security":false,"v8":"7.8.279.23"},{"name":"nodejs","version":"12.20.0","date":"2020-11-24","lts":"Erbium","security":false,"v8":"7.8.279.23"},{"name":"nodejs","version":"12.21.0","date":"2021-02-23","lts":"Erbium","security":true,"v8":"7.8.279.23"},{"name":"nodejs","version":"12.22.0","date":"2021-03-30","lts":"Erbium","security":false,"v8":"7.8.279.23"},{"name":"nodejs","version":"13.0.0","date":"2019-10-22","lts":false,"security":false,"v8":"7.8.279.17"},{"name":"nodejs","version":"13.1.0","date":"2019-11-05","lts":false,"security":false,"v8":"7.8.279.17"},{"name":"nodejs","version":"13.2.0","date":"2019-11-21","lts":false,"security":false,"v8":"7.9.317.23"},{"name":"nodejs","version":"13.3.0","date":"2019-12-03","lts":false,"security":false,"v8":"7.9.317.25"},{"name":"nodejs","version":"13.4.0","date":"2019-12-17","lts":false,"security":true,"v8":"7.9.317.25"},{"name":"nodejs","version":"13.5.0","date":"2019-12-18","lts":false,"security":false,"v8":"7.9.317.25"},{"name":"nodejs","version":"13.6.0","date":"2020-01-07","lts":false,"security":false,"v8":"7.9.317.25"},{"name":"nodejs","version":"13.7.0","date":"2020-01-21","lts":false,"security":false,"v8":"7.9.317.25"},{"name":"nodejs","version":"13.8.0","date":"2020-02-05","lts":false,"security":true,"v8":"7.9.317.25"},{"name":"nodejs","version":"13.9.0","date":"2020-02-18","lts":false,"security":false,"v8":"7.9.317.25"},{"name":"nodejs","version":"13.10.0","date":"2020-03-04","lts":false,"security":false,"v8":"7.9.317.25"},{"name":"nodejs","version":"13.11.0","date":"2020-03-12","lts":false,"security":false,"v8":"7.9.317.25"},{"name":"nodejs","version":"13.12.0","date":"2020-03-26","lts":false,"security":false,"v8":"7.9.317.25"},{"name":"nodejs","version":"13.13.0","date":"2020-04-14","lts":false,"security":false,"v8":"7.9.317.25"},{"name":"nodejs","version":"13.14.0","date":"2020-04-29","lts":false,"security":false,"v8":"7.9.317.25"},{"name":"nodejs","version":"14.0.0","date":"2020-04-21
","lts":false,"security":false,"v8":"8.1.307.30"},{"name":"nodejs","version":"14.1.0","date":"2020-04-29","lts":false,"security":false,"v8":"8.1.307.31"},{"name":"nodejs","version":"14.2.0","date":"2020-05-05","lts":false,"security":false,"v8":"8.1.307.31"},{"name":"nodejs","version":"14.3.0","date":"2020-05-19","lts":false,"security":false,"v8":"8.1.307.31"},{"name":"nodejs","version":"14.4.0","date":"2020-06-02","lts":false,"security":true,"v8":"8.1.307.31"},{"name":"nodejs","version":"14.5.0","date":"2020-06-30","lts":false,"security":false,"v8":"8.3.110.9"},{"name":"nodejs","version":"14.6.0","date":"2020-07-20","lts":false,"security":false,"v8":"8.4.371.19"},{"name":"nodejs","version":"14.7.0","date":"2020-07-29","lts":false,"security":false,"v8":"8.4.371.19"},{"name":"nodejs","version":"14.8.0","date":"2020-08-11","lts":false,"security":false,"v8":"8.4.371.19"},{"name":"nodejs","version":"14.9.0","date":"2020-08-27","lts":false,"security":false,"v8":"8.4.371.19"},{"name":"nodejs","version":"14.10.0","date":"2020-09-08","lts":false,"security":false,"v8":"8.4.371.19"},{"name":"nodejs","version":"14.11.0","date":"2020-09-15","lts":false,"security":true,"v8":"8.4.371.19"},{"name":"nodejs","version":"14.12.0","date":"2020-09-22","lts":false,"security":false,"v8":"8.4.371.19"},{"name":"nodejs","version":"14.13.0","date":"2020-09-29","lts":false,"security":false,"v8":"8.4.371.19"},{"name":"nodejs","version":"14.14.0","date":"2020-10-15","lts":false,"security":false,"v8":"8.4.371.19"},{"name":"nodejs","version":"14.15.0","date":"2020-10-27","lts":"Fermium","security":false,"v8":"8.4.371.19"},{"name":"nodejs","version":"14.16.0","date":"2021-02-23","lts":"Fermium","security":true,"v8":"8.4.371.19"},{"name":"nodejs","version":"14.17.0","date":"2021-05-11","lts":"Fermium","security":false,"v8":"8.4.371.23"},{"name":"nodejs","version":"14.18.0","date":"2021-09-28","lts":"Fermium","security":false,"v8":"8.4.371.23"},{"name":"nodejs","version":"14.19.0","date":"2022-02-01",
"lts":"Fermium","security":false,"v8":"8.4.371.23"},{"name":"nodejs","version":"14.20.0","date":"2022-07-07","lts":"Fermium","security":true,"v8":"8.4.371.23"},{"name":"nodejs","version":"14.21.0","date":"2022-11-01","lts":"Fermium","security":false,"v8":"8.4.371.23"},{"name":"nodejs","version":"15.0.0","date":"2020-10-20","lts":false,"security":false,"v8":"8.6.395.16"},{"name":"nodejs","version":"15.1.0","date":"2020-11-04","lts":false,"security":false,"v8":"8.6.395.17"},{"name":"nodejs","version":"15.2.0","date":"2020-11-10","lts":false,"security":false,"v8":"8.6.395.17"},{"name":"nodejs","version":"15.3.0","date":"2020-11-24","lts":false,"security":false,"v8":"8.6.395.17"},{"name":"nodejs","version":"15.4.0","date":"2020-12-09","lts":false,"security":false,"v8":"8.6.395.17"},{"name":"nodejs","version":"15.5.0","date":"2020-12-22","lts":false,"security":false,"v8":"8.6.395.17"},{"name":"nodejs","version":"15.6.0","date":"2021-01-14","lts":false,"security":false,"v8":"8.6.395.17"},{"name":"nodejs","version":"15.7.0","date":"2021-01-25","lts":false,"security":false,"v8":"8.6.395.17"},{"name":"nodejs","version":"15.8.0","date":"2021-02-02","lts":false,"security":false,"v8":"8.6.395.17"},{"name":"nodejs","version":"15.9.0","date":"2021-02-18","lts":false,"security":false,"v8":"8.6.395.17"},{"name":"nodejs","version":"15.10.0","date":"2021-02-23","lts":false,"security":true,"v8":"8.6.395.17"},{"name":"nodejs","version":"15.11.0","date":"2021-03-03","lts":false,"security":false,"v8":"8.6.395.17"},{"name":"nodejs","version":"15.12.0","date":"2021-03-17","lts":false,"security":false,"v8":"8.6.395.17"},{"name":"nodejs","version":"15.13.0","date":"2021-03-31","lts":false,"security":false,"v8":"8.6.395.17"},{"name":"nodejs","version":"15.14.0","date":"2021-04-06","lts":false,"security":false,"v8":"8.6.395.17"},{"name":"nodejs","version":"16.0.0","date":"2021-04-20","lts":false,"security":false,"v8":"9.0.257.17"},{"name":"nodejs","version":"16.1.0","date":"2021-05-04","lts":f
alse,"security":false,"v8":"9.0.257.24"},{"name":"nodejs","version":"16.2.0","date":"2021-05-19","lts":false,"security":false,"v8":"9.0.257.25"},{"name":"nodejs","version":"16.3.0","date":"2021-06-03","lts":false,"security":false,"v8":"9.0.257.25"},{"name":"nodejs","version":"16.4.0","date":"2021-06-23","lts":false,"security":false,"v8":"9.1.269.36"},{"name":"nodejs","version":"16.5.0","date":"2021-07-14","lts":false,"security":false,"v8":"9.1.269.38"},{"name":"nodejs","version":"16.6.0","date":"2021-07-29","lts":false,"security":true,"v8":"9.2.230.21"},{"name":"nodejs","version":"16.7.0","date":"2021-08-18","lts":false,"security":false,"v8":"9.2.230.21"},{"name":"nodejs","version":"16.8.0","date":"2021-08-25","lts":false,"security":false,"v8":"9.2.230.21"},{"name":"nodejs","version":"16.9.0","date":"2021-09-07","lts":false,"security":false,"v8":"9.3.345.16"},{"name":"nodejs","version":"16.10.0","date":"2021-09-22","lts":false,"security":false,"v8":"9.3.345.19"},{"name":"nodejs","version":"16.11.0","date":"2021-10-08","lts":false,"security":false,"v8":"9.4.146.19"},{"name":"nodejs","version":"16.12.0","date":"2021-10-20","lts":false,"security":false,"v8":"9.4.146.19"},{"name":"nodejs","version":"16.13.0","date":"2021-10-26","lts":"Gallium","security":false,"v8":"9.4.146.19"},{"name":"nodejs","version":"16.14.0","date":"2022-02-08","lts":"Gallium","security":false,"v8":"9.4.146.24"},{"name":"nodejs","version":"16.15.0","date":"2022-04-26","lts":"Gallium","security":false,"v8":"9.4.146.24"},{"name":"nodejs","version":"16.16.0","date":"2022-07-07","lts":"Gallium","security":true,"v8":"9.4.146.24"},{"name":"nodejs","version":"16.17.0","date":"2022-08-16","lts":"Gallium","security":false,"v8":"9.4.146.26"},{"name":"nodejs","version":"16.18.0","date":"2022-10-12","lts":"Gallium","security":false,"v8":"9.4.146.26"},{"name":"nodejs","version":"16.19.0","date":"2022-12-13","lts":"Gallium","security":false,"v8":"9.4.146.26"},{"name":"nodejs","version":"16.20.0","date":"2023-0
3-28","lts":"Gallium","security":false,"v8":"9.4.146.26"},{"name":"nodejs","version":"17.0.0","date":"2021-10-19","lts":false,"security":false,"v8":"9.5.172.21"},{"name":"nodejs","version":"17.1.0","date":"2021-11-09","lts":false,"security":false,"v8":"9.5.172.25"},{"name":"nodejs","version":"17.2.0","date":"2021-11-30","lts":false,"security":false,"v8":"9.6.180.14"},{"name":"nodejs","version":"17.3.0","date":"2021-12-17","lts":false,"security":false,"v8":"9.6.180.15"},{"name":"nodejs","version":"17.4.0","date":"2022-01-18","lts":false,"security":false,"v8":"9.6.180.15"},{"name":"nodejs","version":"17.5.0","date":"2022-02-10","lts":false,"security":false,"v8":"9.6.180.15"},{"name":"nodejs","version":"17.6.0","date":"2022-02-22","lts":false,"security":false,"v8":"9.6.180.15"},{"name":"nodejs","version":"17.7.0","date":"2022-03-09","lts":false,"security":false,"v8":"9.6.180.15"},{"name":"nodejs","version":"17.8.0","date":"2022-03-22","lts":false,"security":false,"v8":"9.6.180.15"},{"name":"nodejs","version":"17.9.0","date":"2022-04-07","lts":false,"security":false,"v8":"9.6.180.15"},{"name":"nodejs","version":"18.0.0","date":"2022-04-18","lts":false,"security":false,"v8":"10.1.124.8"},{"name":"nodejs","version":"18.1.0","date":"2022-05-03","lts":false,"security":false,"v8":"10.1.124.8"},{"name":"nodejs","version":"18.2.0","date":"2022-05-17","lts":false,"security":false,"v8":"10.1.124.8"},{"name":"nodejs","version":"18.3.0","date":"2022-06-02","lts":false,"security":false,"v8":"10.2.154.4"},{"name":"nodejs","version":"18.4.0","date":"2022-06-16","lts":false,"security":false,"v8":"10.2.154.4"},{"name":"nodejs","version":"18.5.0","date":"2022-07-06","lts":false,"security":true,"v8":"10.2.154.4"},{"name":"nodejs","version":"18.6.0","date":"2022-07-13","lts":false,"security":false,"v8":"10.2.154.13"},{"name":"nodejs","version":"18.7.0","date":"2022-07-26","lts":false,"security":false,"v8":"10.2.154.13"},{"name":"nodejs","version":"18.8.0","date":"2022-08-24","lts":false,"
security":false,"v8":"10.2.154.13"},{"name":"nodejs","version":"18.9.0","date":"2022-09-07","lts":false,"security":false,"v8":"10.2.154.15"},{"name":"nodejs","version":"18.10.0","date":"2022-09-28","lts":false,"security":false,"v8":"10.2.154.15"},{"name":"nodejs","version":"18.11.0","date":"2022-10-13","lts":false,"security":false,"v8":"10.2.154.15"},{"name":"nodejs","version":"18.12.0","date":"2022-10-25","lts":"Hydrogen","security":false,"v8":"10.2.154.15"},{"name":"nodejs","version":"18.13.0","date":"2023-01-05","lts":"Hydrogen","security":false,"v8":"10.2.154.23"},{"name":"nodejs","version":"18.14.0","date":"2023-02-01","lts":"Hydrogen","security":false,"v8":"10.2.154.23"},{"name":"nodejs","version":"18.15.0","date":"2023-03-05","lts":"Hydrogen","security":false,"v8":"10.2.154.26"},{"name":"nodejs","version":"18.16.0","date":"2023-04-12","lts":"Hydrogen","security":false,"v8":"10.2.154.26"},{"name":"nodejs","version":"19.0.0","date":"2022-10-17","lts":false,"security":false,"v8":"10.7.193.13"},{"name":"nodejs","version":"19.1.0","date":"2022-11-14","lts":false,"security":false,"v8":"10.7.193.20"},{"name":"nodejs","version":"19.2.0","date":"2022-11-29","lts":false,"security":false,"v8":"10.8.168.20"},{"name":"nodejs","version":"19.3.0","date":"2022-12-14","lts":false,"security":false,"v8":"10.8.168.21"},{"name":"nodejs","version":"19.4.0","date":"2023-01-05","lts":false,"security":false,"v8":"10.8.168.25"},{"name":"nodejs","version":"19.5.0","date":"2023-01-24","lts":false,"security":false,"v8":"10.8.168.25"},{"name":"nodejs","version":"19.6.0","date":"2023-02-01","lts":false,"security":false,"v8":"10.8.168.25"},{"name":"nodejs","version":"19.7.0","date":"2023-02-21","lts":false,"security":false,"v8":"10.8.168.25"},{"name":"nodejs","version":"19.8.0","date":"2023-03-14","lts":false,"security":false,"v8":"10.8.168.25"},{"name":"nodejs","version":"19.9.0","date":"2023-04-10","lts":false,"security":false,"v8":"10.8.168.25"},{"name":"nodejs","version":"20.0.0","date"
:"2023-04-17","lts":false,"security":false,"v8":"11.3.244.4"},{"name":"nodejs","version":"20.1.0","date":"2023-05-03","lts":false,"security":false,"v8":"11.3.244.8"},{"name":"nodejs","version":"20.2.0","date":"2023-05-16","lts":false,"security":false,"v8":"11.3.244.8"}]
\ No newline at end of file
diff --git a/tools/node_modules/eslint/node_modules/node-releases/package.json b/tools/node_modules/eslint/node_modules/node-releases/package.json
index aa655e3a1ceec2..c3ab96ed1f7e16 100644
--- a/tools/node_modules/eslint/node_modules/node-releases/package.json
+++ b/tools/node_modules/eslint/node_modules/node-releases/package.json
@@ -1,7 +1,8 @@
{
"name": "node-releases",
- "version": "2.0.11",
+ "version": "2.0.12",
"description": "Node.js releases data",
+ "type": "module",
"scripts": {
"build": "node scripts/build.js"
},
diff --git a/tools/node_modules/eslint/package.json b/tools/node_modules/eslint/package.json
index cc636323756de7..42ad0ca896a5d4 100644
--- a/tools/node_modules/eslint/package.json
+++ b/tools/node_modules/eslint/package.json
@@ -1,6 +1,6 @@
{
"name": "eslint",
- "version": "8.41.0",
+ "version": "8.42.0",
"author": "Nicholas C. Zakas ",
"description": "An AST-based pattern checker for JavaScript.",
"bin": {
@@ -63,8 +63,8 @@
"@eslint-community/eslint-utils": "^4.2.0",
"@eslint-community/regexpp": "^4.4.0",
"@eslint/eslintrc": "^2.0.3",
- "@eslint/js": "8.41.0",
- "@humanwhocodes/config-array": "^0.11.8",
+ "@eslint/js": "8.42.0",
+ "@humanwhocodes/config-array": "^0.11.10",
"@humanwhocodes/module-importer": "^1.0.1",
"@nodelib/fs.walk": "^1.2.8",
"ajv": "^6.10.0",
diff --git a/tools/snapshot/node_mksnapshot.cc b/tools/snapshot/node_mksnapshot.cc
index d6d92ab156da62..ecc295acdbea32 100644
--- a/tools/snapshot/node_mksnapshot.cc
+++ b/tools/snapshot/node_mksnapshot.cc
@@ -87,7 +87,7 @@ int BuildSnapshot(int argc, char* argv[]) {
node::ExitCode exit_code = node::ExitCode::kNoFailure;
{
exit_code = node::SnapshotBuilder::Generate(
- out, result->args(), result->exec_args());
+ out, result->args(), result->exec_args(), std::nullopt);
if (exit_code == node::ExitCode::kNoFailure) {
if (!out) {
std::cerr << "Failed to write " << out_path << "\n";