diff --git a/dist/views/privacy.html b/dist/views/privacy.html index 8ebc89b..bb0be8b 100644 --- a/dist/views/privacy.html +++ b/dist/views/privacy.html @@ -23,10 +23,10 @@
-

Privacy Policy

+

Privacy Policy

How we treat your data


-
+
This website is committed to protecting your privacy and does not collect or store any data of any type from you or your device. This means that we do not use cookies, trackers, analytics, or any other tools that may collect or store your personal information, browsing history, preferences, or any other data that may identify you or your device. We also do not keep any logs of your visits, requests, or interactions with our website.

We respect your right to privacy and anonymity, and we only provide you with the content and services that you request from us. We do not share, sell, rent, or disclose any data of any type to any third parties for any purpose.
diff --git a/dist/views/result.html b/dist/views/result.html index 969388e..13cf1a8 100644 --- a/dist/views/result.html +++ b/dist/views/result.html @@ -38,10 +38,10 @@
-

<%= title %>

+

<%= title %>

<%= description %>


-
<%= summary %>
+
<%= summary %>
\ No newline at end of file
diff --git a/dist/views/search.html b/dist/views/search.html index 6041ae4..8fe30af 100644 --- a/dist/views/search.html +++ b/dist/views/search.html @@ -38,12 +38,12 @@
-

<%= title %>

+

<%= title %>

<%= description %>


<% for (let i = 1; i < search_results.length; i++) { %> - + <% } %>
diff --git a/dist/views/terms.html b/dist/views/terms.html index 28c4476..111b940 100644 --- a/dist/views/terms.html +++ b/dist/views/terms.html @@ -23,10 +23,10 @@
-

Terms and Conditions

+

Terms and Conditions

The rules you have to follow to use our website


-
+
These terms and conditions govern your use of miniWikipedia, a website that provides you with concise summaries of various topics. By using our website, you agree to these terms and conditions and accept our privacy policy.

Our website is free of charge and you can access and use it for your personal, non-commercial purposes only. You may not use our website for any commercial purposes without our prior written consent. You may not copy, reproduce, distribute, modify, or create derivative works from our website or any of its content for commercial purposes without our prior written consent. You may not use our website in any way that is unlawful, harmful, fraudulent, or infringes the rights of others.
diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json index 33174f3..92e8fd8 100644 --- a/node_modules/.package-lock.json +++ b/node_modules/.package-lock.json @@ -628,9 +628,9 @@ } }, "node_modules/npm": { - "version": "9.7.1", - "resolved": "https://registry.npmjs.org/npm/-/npm-9.7.1.tgz", - "integrity": "sha512-kxMviaiLX4Lfnjy2dt7EWB87v5QdLiGpy04S2ORdKLmPqFhgy8g4cgJjQfnWob4mJIaNHjBO+hk45CvLlsZZ8g==", + "version": "9.8.0", + "resolved": "https://registry.npmjs.org/npm/-/npm-9.8.0.tgz", + "integrity": "sha512-AXeiBAdfM5K2jvBwA7EGLKeYyt0VnhmJRnlq4k2+M0Ao9v7yKJBqF8xFPzQL8kAybzwlfpTPCZwM4uTIszb3xA==", "bundleDependencies": [ "@isaacs/string-locale-compare", "@npmcli/arborist", @@ -690,6 +690,7 @@ "semver", "sigstore", "ssri", + "supports-color", "tar", "text-table", "tiny-relative-date", @@ -700,14 +701,14 @@ ], "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", - "@npmcli/arborist": "^6.2.9", - "@npmcli/config": "^6.2.0", + "@npmcli/arborist": "^6.3.0", + "@npmcli/config": "^6.2.1", "@npmcli/map-workspaces": "^3.0.4", - "@npmcli/package-json": "^3.1.1", + "@npmcli/package-json": "^4.0.0", "@npmcli/run-script": "^6.0.2", "abbrev": "^2.0.0", "archy": "~1.0.0", - "cacache": "^17.1.2", + "cacache": "^17.1.3", "chalk": "^5.2.0", "ci-info": "^3.8.0", "cli-columns": "^4.0.0", @@ -715,21 +716,21 @@ "columnify": "^1.6.0", "fastest-levenshtein": "^1.0.16", "fs-minipass": "^3.0.2", - "glob": "^10.2.4", + "glob": "^10.2.7", "graceful-fs": "^4.2.11", "hosted-git-info": "^6.1.1", - "ini": "^4.1.0", + "ini": "^4.1.1", "init-package-json": "^5.0.0", "is-cidr": "^4.0.2", "json-parse-even-better-errors": "^3.0.0", "libnpmaccess": "^7.0.2", - "libnpmdiff": "^5.0.17", - "libnpmexec": "^6.0.0", - "libnpmfund": "^4.0.17", + "libnpmdiff": "^5.0.19", + "libnpmexec": "^6.0.2", + "libnpmfund": "^4.0.19", "libnpmhook": "^9.0.3", "libnpmorg": "^5.0.4", - "libnpmpack": "^5.0.17", - "libnpmpublish": "^7.3.0", + "libnpmpack": "^5.0.19", + "libnpmpublish": "^7.5.0", "libnpmsearch": "^6.0.2", "libnpmteam": "^5.0.3", "libnpmversion": "^4.0.2", @@ -738,8 +739,8 @@ "minipass": "^5.0.0", "minipass-pipeline": "^1.2.4", "ms": "^2.1.2", - "node-gyp": "^9.3.1", - "nopt": "^7.1.0", + "node-gyp": "^9.4.0", + "nopt": "^7.2.0", "npm-audit-report": "^5.0.0", "npm-install-checks": "^6.1.1", "npm-package-arg": "^10.1.0", @@ -749,15 +750,16 @@ "npm-user-validate": "^2.0.0", "npmlog": "^7.0.1", "p-map": "^4.0.0", - "pacote": "^15.1.3", + "pacote": "^15.2.0", "parse-conflict-json": "^3.0.1", "proc-log": "^3.0.0", "qrcode-terminal": "^0.12.0", "read": "^2.1.0", - "semver": "^7.5.1", - "sigstore": "^1.5.0", + "semver": "^7.5.2", + "sigstore": "^1.7.0", "ssri": "^10.0.4", - "tar": "^6.1.14", + "supports-color": "^9.3.1", + "tar": "^6.1.15", "text-table": "~0.2.0", "tiny-relative-date": "^1.3.0", "treeverse": "^3.0.0", @@ -782,11 +784,6 @@ "node": ">=0.1.90" } }, - "node_modules/npm/node_modules/@gar/promisify": { - "version": "1.1.3", - "inBundle": true, - "license": "MIT" - }, "node_modules/npm/node_modules/@isaacs/cliui": {
"version": "8.0.2", "inBundle": true, @@ -836,7 +833,7 @@ } }, "node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi": { - "version": "7.0.1", + "version": "7.1.0", "inBundle": true, "license": "MIT", "dependencies": { @@ -855,7 +852,7 @@ "license": "ISC" }, "node_modules/npm/node_modules/@npmcli/arborist": { - "version": "6.2.9", + "version": "6.3.0", "inBundle": true, "license": "ISC", "dependencies": { @@ -866,7 +863,7 @@ "@npmcli/metavuln-calculator": "^5.0.0", "@npmcli/name-from-folder": "^2.0.0", "@npmcli/node-gyp": "^3.0.0", - "@npmcli/package-json": "^3.0.0", + "@npmcli/package-json": "^4.0.0", "@npmcli/query": "^3.0.0", "@npmcli/run-script": "^6.0.0", "bin-links": "^4.0.1", @@ -901,11 +898,12 @@ } }, "node_modules/npm/node_modules/@npmcli/config": { - "version": "6.2.0", + "version": "6.2.1", "inBundle": true, "license": "ISC", "dependencies": { "@npmcli/map-workspaces": "^3.0.2", + "ci-info": "^3.8.0", "ini": "^4.1.0", "nopt": "^7.0.0", "proc-log": "^3.0.0", @@ -1000,18 +998,6 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, - "node_modules/npm/node_modules/@npmcli/move-file": { - "version": "2.0.1", - "inBundle": true, - "license": "MIT", - "dependencies": { - "mkdirp": "^1.0.4", - "rimraf": "^3.0.2" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, "node_modules/npm/node_modules/@npmcli/name-from-folder": { "version": "2.0.0", "inBundle": true, @@ -1029,7 +1015,7 @@ } }, "node_modules/npm/node_modules/@npmcli/package-json": { - "version": "3.1.1", + "version": "4.0.0", "inBundle": true, "license": "ISC", "dependencies": { @@ -1098,6 +1084,18 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/npm/node_modules/@sigstore/tuf": { + "version": "1.0.2", + "inBundle": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.1.0", + "tuf-js": "^1.1.7" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, "node_modules/npm/node_modules/@tootallnate/once": { "version": "2.0.0", "inBundle": true, @@ -1311,7 +1309,7 @@ } }, "node_modules/npm/node_modules/cacache": { - "version": "17.1.2", + "version": "17.1.3", "inBundle": true, "license": "ISC", "dependencies": { @@ -1616,6 +1614,11 @@ "node": ">=0.8.x" } }, + "node_modules/npm/node_modules/exponential-backoff": { + "version": "3.1.1", + "inBundle": true, + "license": "Apache-2.0" + }, "node_modules/npm/node_modules/fastest-levenshtein": { "version": "1.0.16", "inBundle": true, @@ -1679,14 +1682,14 @@ } }, "node_modules/npm/node_modules/glob": { - "version": "10.2.4", + "version": "10.2.7", "inBundle": true, "license": "ISC", "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^2.0.3", - "minimatch": "^9.0.0", - "minipass": "^5.0.0 || ^6.0.0", + "minimatch": "^9.0.1", + "minipass": "^5.0.0 || ^6.0.2", "path-scurry": "^1.7.0" }, "bin": { @@ -1827,11 +1830,6 @@ "node": ">=8" } }, - "node_modules/npm/node_modules/infer-owner": { - "version": "1.0.4", - "inBundle": true, - "license": "ISC" - }, "node_modules/npm/node_modules/inflight": { "version": "1.0.6", "inBundle": true, @@ -1847,7 +1845,7 @@ "license": "ISC" }, "node_modules/npm/node_modules/ini": { - "version": "4.1.0", + "version": "4.1.1", "inBundle": true, "license": "ISC", "engines": { @@ -1896,7 +1894,7 @@ } }, "node_modules/npm/node_modules/is-core-module": { - "version": "2.12.0", + "version": "2.12.1", "inBundle": true, "license": "MIT", "dependencies": { @@ -1925,7 +1923,7 @@ "license": "ISC" }, "node_modules/npm/node_modules/jackspeak": { - "version": "2.2.0", + 
"version": "2.2.1", "inBundle": true, "license": "BlueOak-1.0.0", "dependencies": { @@ -1988,11 +1986,11 @@ } }, "node_modules/npm/node_modules/libnpmdiff": { - "version": "5.0.17", + "version": "5.0.19", "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/arborist": "^6.2.9", + "@npmcli/arborist": "^6.3.0", "@npmcli/disparity-colors": "^3.0.0", "@npmcli/installed-package-contents": "^2.0.2", "binary-extensions": "^2.2.0", @@ -2007,11 +2005,11 @@ } }, "node_modules/npm/node_modules/libnpmexec": { - "version": "6.0.0", + "version": "6.0.2", "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/arborist": "^6.2.9", + "@npmcli/arborist": "^6.3.0", "@npmcli/run-script": "^6.0.0", "ci-info": "^3.7.1", "npm-package-arg": "^10.1.0", @@ -2028,11 +2026,11 @@ } }, "node_modules/npm/node_modules/libnpmfund": { - "version": "4.0.17", + "version": "4.0.19", "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/arborist": "^6.2.9" + "@npmcli/arborist": "^6.3.0" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" @@ -2063,11 +2061,11 @@ } }, "node_modules/npm/node_modules/libnpmpack": { - "version": "5.0.17", + "version": "5.0.19", "inBundle": true, "license": "ISC", "dependencies": { - "@npmcli/arborist": "^6.2.9", + "@npmcli/arborist": "^6.3.0", "@npmcli/run-script": "^6.0.0", "npm-package-arg": "^10.1.0", "pacote": "^15.0.8" @@ -2077,7 +2075,7 @@ } }, "node_modules/npm/node_modules/libnpmpublish": { - "version": "7.3.0", + "version": "7.5.0", "inBundle": true, "license": "ISC", "dependencies": { @@ -2166,7 +2164,7 @@ } }, "node_modules/npm/node_modules/minimatch": { - "version": "9.0.0", + "version": "9.0.1", "inBundle": true, "license": "ISC", "dependencies": { @@ -2367,14 +2365,15 @@ } }, "node_modules/npm/node_modules/node-gyp": { - "version": "9.3.1", + "version": "9.4.0", "inBundle": true, "license": "MIT", "dependencies": { "env-paths": "^2.2.0", + "exponential-backoff": "^3.1.1", "glob": "^7.1.4", "graceful-fs": "^4.2.6", - "make-fetch-happen": "^10.0.3", + "make-fetch-happen": "^11.0.3", "nopt": "^6.0.0", "npmlog": "^6.0.0", "rimraf": "^3.0.2", @@ -2389,18 +2388,6 @@ "node": "^12.13 || ^14.13 || >=16" } }, - "node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs": { - "version": "2.1.2", - "inBundle": true, - "license": "ISC", - "dependencies": { - "@gar/promisify": "^1.1.3", - "semver": "^7.3.5" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, "node_modules/npm/node_modules/node-gyp/node_modules/abbrev": { "version": "1.1.1", "inBundle": true, @@ -2427,82 +2414,6 @@ "concat-map": "0.0.1" } }, - "node_modules/npm/node_modules/node-gyp/node_modules/cacache": { - "version": "16.1.3", - "inBundle": true, - "license": "ISC", - "dependencies": { - "@npmcli/fs": "^2.1.0", - "@npmcli/move-file": "^2.0.0", - "chownr": "^2.0.0", - "fs-minipass": "^2.1.0", - "glob": "^8.0.1", - "infer-owner": "^1.0.4", - "lru-cache": "^7.7.1", - "minipass": "^3.1.6", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "mkdirp": "^1.0.4", - "p-map": "^4.0.0", - "promise-inflight": "^1.0.1", - "rimraf": "^3.0.2", - "ssri": "^9.0.0", - "tar": "^6.1.11", - "unique-filename": "^2.0.0" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion": { - "version": "2.0.1", - "inBundle": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - 
"node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob": { - "version": "8.1.0", - "inBundle": true, - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^5.0.1", - "once": "^1.3.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch": { - "version": "5.1.6", - "inBundle": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/npm/node_modules/node-gyp/node_modules/fs-minipass": { - "version": "2.1.0", - "inBundle": true, - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/npm/node_modules/node-gyp/node_modules/gauge": { "version": "4.0.4", "inBundle": true, @@ -2540,32 +2451,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen": { - "version": "10.2.1", - "inBundle": true, - "license": "ISC", - "dependencies": { - "agentkeepalive": "^4.2.1", - "cacache": "^16.1.0", - "http-cache-semantics": "^4.1.0", - "http-proxy-agent": "^5.0.0", - "https-proxy-agent": "^5.0.0", - "is-lambda": "^1.0.1", - "lru-cache": "^7.7.1", - "minipass": "^3.1.6", - "minipass-collect": "^1.0.2", - "minipass-fetch": "^2.0.3", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "promise-retry": "^2.0.1", - "socks-proxy-agent": "^7.0.0", - "ssri": "^9.0.0" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, "node_modules/npm/node_modules/node-gyp/node_modules/minimatch": { "version": "3.1.2", "inBundle": true, @@ -2577,33 +2462,6 @@ "node": "*" } }, - "node_modules/npm/node_modules/node-gyp/node_modules/minipass": { - "version": "3.3.6", - "inBundle": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch": { - "version": "2.1.2", - "inBundle": true, - "license": "MIT", - "dependencies": { - "minipass": "^3.1.6", - "minipass-sized": "^1.0.3", - "minizlib": "^2.1.2" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "optionalDependencies": { - "encoding": "^0.1.13" - } - }, "node_modules/npm/node_modules/node-gyp/node_modules/nopt": { "version": "6.0.0", "inBundle": true, @@ -2650,39 +2508,6 @@ "inBundle": true, "license": "ISC" }, - "node_modules/npm/node_modules/node-gyp/node_modules/ssri": { - "version": "9.0.1", - "inBundle": true, - "license": "ISC", - "dependencies": { - "minipass": "^3.1.1" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/npm/node_modules/node-gyp/node_modules/unique-filename": { - "version": "2.0.1", - "inBundle": true, - "license": "ISC", - "dependencies": { - "unique-slug": "^3.0.0" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/npm/node_modules/node-gyp/node_modules/unique-slug": { - "version": "3.0.0", - "inBundle": true, - "license": "ISC", - "dependencies": { - "imurmurhash": "^0.1.4" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, "node_modules/npm/node_modules/node-gyp/node_modules/which": { "version": "2.0.2", "inBundle": true, @@ -2698,7 +2523,7 @@ } }, "node_modules/npm/node_modules/nopt": { - "version": 
"7.1.0", + "version": "7.2.0", "inBundle": true, "license": "ISC", "dependencies": { @@ -2876,7 +2701,7 @@ } }, "node_modules/npm/node_modules/pacote": { - "version": "15.1.3", + "version": "15.2.0", "inBundle": true, "license": "ISC", "dependencies": { @@ -2936,12 +2761,12 @@ } }, "node_modules/npm/node_modules/path-scurry": { - "version": "1.9.1", + "version": "1.9.2", "inBundle": true, "license": "BlueOak-1.0.0", "dependencies": { "lru-cache": "^9.1.1", - "minipass": "^5.0.0 || ^6.0.0" + "minipass": "^5.0.0 || ^6.0.2" }, "engines": { "node": ">=16 || 14 >=14.17" @@ -3057,7 +2882,7 @@ } }, "node_modules/npm/node_modules/read-package-json": { - "version": "6.0.3", + "version": "6.0.4", "inBundle": true, "license": "ISC", "dependencies": { @@ -3158,7 +2983,21 @@ } }, "node_modules/npm/node_modules/safe-buffer": { - "version": "5.1.2", + "version": "5.2.1", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], "inBundle": true, "license": "MIT" }, @@ -3169,7 +3008,7 @@ "optional": true }, "node_modules/npm/node_modules/semver": { - "version": "7.5.1", + "version": "7.5.2", "inBundle": true, "license": "ISC", "dependencies": { @@ -3229,13 +3068,13 @@ } }, "node_modules/npm/node_modules/sigstore": { - "version": "1.5.2", + "version": "1.7.0", "inBundle": true, "license": "Apache-2.0", "dependencies": { "@sigstore/protobuf-specs": "^0.1.0", - "make-fetch-happen": "^11.0.1", - "tuf-js": "^1.1.3" + "@sigstore/tuf": "^1.0.1", + "make-fetch-happen": "^11.0.1" }, "bin": { "sigstore": "bin/sigstore.js" @@ -3319,11 +3158,11 @@ } }, "node_modules/npm/node_modules/string_decoder": { - "version": "1.1.1", + "version": "1.3.0", "inBundle": true, "license": "MIT", "dependencies": { - "safe-buffer": "~5.1.0" + "safe-buffer": "~5.2.0" } }, "node_modules/npm/node_modules/string-width": { @@ -3376,8 +3215,19 @@ "node": ">=8" } }, + "node_modules/npm/node_modules/supports-color": { + "version": "9.3.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, "node_modules/npm/node_modules/tar": { - "version": "6.1.14", + "version": "6.1.15", "inBundle": true, "license": "ISC", "dependencies": { @@ -3433,13 +3283,13 @@ } }, "node_modules/npm/node_modules/tuf-js": { - "version": "1.1.6", + "version": "1.1.7", "inBundle": true, "license": "MIT", "dependencies": { "@tufjs/models": "1.0.4", "debug": "^4.3.4", - "make-fetch-happen": "^11.1.0" + "make-fetch-happen": "^11.1.1" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" @@ -3604,7 +3454,7 @@ } }, "node_modules/npm/node_modules/wrap-ansi/node_modules/strip-ansi": { - "version": "7.0.1", + "version": "7.1.0", "inBundle": true, "license": "MIT", "dependencies": { diff --git a/node_modules/npm/bin/node-gyp-bin/node-gyp b/node_modules/npm/bin/node-gyp-bin/node-gyp deleted file mode 100755 index 70efb6f..0000000 --- a/node_modules/npm/bin/node-gyp-bin/node-gyp +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env sh -if [ "x$npm_config_node_gyp" = "x" ]; then - node "`dirname "$0"`/../../node_modules/node-gyp/bin/node-gyp.js" "$@" -else - "$npm_config_node_gyp" "$@" -fi diff --git a/node_modules/npm/bin/node-gyp-bin/node-gyp.cmd b/node_modules/npm/bin/node-gyp-bin/node-gyp.cmd deleted file mode 100755 index 1ef2ae0..0000000 --- a/node_modules/npm/bin/node-gyp-bin/node-gyp.cmd +++ 
/dev/null @@ -1,5 +0,0 @@ -if not defined npm_config_node_gyp ( - node "%~dp0\..\..\node_modules\node-gyp\bin\node-gyp.js" %* -) else ( - node "%npm_config_node_gyp%" %* -) diff --git a/node_modules/npm/bin/npm b/node_modules/npm/bin/npm index a131a53..7f210b9 100755 --- a/node_modules/npm/bin/npm +++ b/node_modules/npm/bin/npm @@ -1,4 +1,8 @@ #!/usr/bin/env bash + +# This is used by the Node.js installer, which expects the cygwin/mingw +# shell script to already be present in the npm dependency folder. + (set -o igncr) 2>/dev/null && set -o igncr; # cygwin encoding fix basedir=`dirname "$0"` @@ -7,6 +11,16 @@ case `uname` in *CYGWIN*) basedir=`cygpath -w "$basedir"`;; esac +if [ `uname` = 'Linux' ] && type wslpath &>/dev/null ; then + IS_WSL="true" +fi + +function no_node_dir { + # if this didn't work, then everything else below will fail + echo "Could not determine Node.js install directory" >&2 + exit 1 +} + NODE_EXE="$basedir/node.exe" if ! [ -x "$NODE_EXE" ]; then NODE_EXE="$basedir/node" @@ -17,14 +31,20 @@ fi # this path is passed to node.exe, so it needs to match whatever # kind of paths Node.js thinks it's using, typically win32 paths. -CLI_BASEDIR="$("$NODE_EXE" -p 'require("path").dirname(process.execPath)')" +CLI_BASEDIR="$("$NODE_EXE" -p 'require("path").dirname(process.execPath)' 2> /dev/null)" +if [ $? -ne 0 ]; then + # this fails under WSL 1 so add an additional message. we also suppress stderr above + # because the actual error raised is not helpful. in WSL 1 node.exe cannot handle + # output redirection properly. See https://github.com/microsoft/WSL/issues/2370 + if [ "$IS_WSL" == "true" ]; then + echo "WSL 1 is not supported. Please upgrade to WSL 2 or above." >&2 + fi + no_node_dir +fi NPM_CLI_JS="$CLI_BASEDIR/node_modules/npm/bin/npm-cli.js" - NPM_PREFIX=`"$NODE_EXE" "$NPM_CLI_JS" prefix -g` if [ $? -ne 0 ]; then - # if this didn't work, then everything else below will fail - echo "Could not determine Node.js install directory" >&2 - exit 1 + no_node_dir fi NPM_PREFIX_NPM_CLI_JS="$NPM_PREFIX/node_modules/npm/bin/npm-cli.js" @@ -34,7 +54,7 @@ NPM_WSL_PATH="/.." # WSL can run Windows binaries, so we have to give it the win32 path # however, WSL bash tests against posix paths, so we need to construct that # to know if npm is installed globally. -if [ `uname` = 'Linux' ] && type wslpath &>/dev/null ; then +if [ "$IS_WSL" == "true" ]; then NPM_WSL_PATH=`wslpath "$NPM_PREFIX_NPM_CLI_JS"` fi if [ -f "$NPM_PREFIX_NPM_CLI_JS" ] || [ -f "$NPM_WSL_PATH" ]; then diff --git a/node_modules/npm/bin/npm.ps1 b/node_modules/npm/bin/npm.ps1 new file mode 100644 index 0000000..f2f236a --- /dev/null +++ b/node_modules/npm/bin/npm.ps1 @@ -0,0 +1,35 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 + +$nodeexe = "node$exe" +$nodebin = $(Get-Command $nodeexe -ErrorAction SilentlyContinue -ErrorVariable F).Source +if ($nodebin -eq $null) { + Write-Host "$nodeexe not found." 
+ exit 1 +} +$nodedir = $(New-Object -ComObject Scripting.FileSystemObject).GetFile("$nodebin").ParentFolder.Path + +$npmclijs="$nodedir/node_modules/npm/bin/npm-cli.js" +$npmprefix=(& $nodeexe $npmclijs prefix -g) +if ($LASTEXITCODE -ne 0) { + Write-Host "Could not determine Node.js install directory" + exit 1 +} +$npmprefixclijs="$npmprefix/node_modules/npm/bin/npm-cli.js" + +# Support pipeline input +if ($MyInvocation.ExpectingInput) { + $input | & $nodeexe $npmprefixclijs $args +} else { + & $nodeexe $npmprefixclijs $args +} +$ret=$LASTEXITCODE +exit $ret diff --git a/node_modules/npm/bin/npx b/node_modules/npm/bin/npx index a34e345..719ff8e 100755 --- a/node_modules/npm/bin/npx +++ b/node_modules/npm/bin/npx @@ -11,6 +11,16 @@ case `uname` in *CYGWIN*) basedir=`cygpath -w "$basedir"`;; esac +if [ `uname` = 'Linux' ] && type wslpath &>/dev/null ; then + IS_WSL="true" +fi + +function no_node_dir { + # if this didn't work, then everything else below will fail + echo "Could not determine Node.js install directory" >&2 + exit 1 +} + NODE_EXE="$basedir/node.exe" if ! [ -x "$NODE_EXE" ]; then NODE_EXE="$basedir/node" @@ -19,17 +29,24 @@ if ! [ -x "$NODE_EXE" ]; then NODE_EXE=node fi -# these paths are passed to node.exe, so they need to match whatever +# this path is passed to node.exe, so it needs to match whatever # kind of paths Node.js thinks it's using, typically win32 paths. -CLI_BASEDIR="$("$NODE_EXE" -p 'require("path").dirname(process.execPath)')" +CLI_BASEDIR="$("$NODE_EXE" -p 'require("path").dirname(process.execPath)' 2> /dev/null)" if [ $? -ne 0 ]; then - # if this didn't work, then everything else below will fail - echo "Could not determine Node.js install directory" >&2 - exit 1 + # this fails under WSL 1 so add an additional message. we also suppress stderr above + # because the actual error raised is not helpful. in WSL 1 node.exe cannot handle + # output redirection properly. See https://github.com/microsoft/WSL/issues/2370 + if [ "$IS_WSL" == "true" ]; then + echo "WSL 1 is not supported. Please upgrade to WSL 2 or above." >&2 + fi + no_node_dir fi NPM_CLI_JS="$CLI_BASEDIR/node_modules/npm/bin/npm-cli.js" NPX_CLI_JS="$CLI_BASEDIR/node_modules/npm/bin/npx-cli.js" NPM_PREFIX=`"$NODE_EXE" "$NPM_CLI_JS" prefix -g` +if [ $? -ne 0 ]; then + no_node_dir +fi NPM_PREFIX_NPX_CLI_JS="$NPM_PREFIX/node_modules/npm/bin/npx-cli.js" # a path that will fail -f test on any posix bash @@ -38,7 +55,7 @@ NPX_WSL_PATH="/.." # WSL can run Windows binaries, so we have to give it the win32 path # however, WSL bash tests against posix paths, so we need to construct that # to know if npm is installed globally. 
-if [ `uname` = 'Linux' ] && type wslpath &>/dev/null ; then +if [ "$IS_WSL" == "true" ]; then NPX_WSL_PATH=`wslpath "$NPM_PREFIX_NPX_CLI_JS"` fi if [ -f "$NPM_PREFIX_NPX_CLI_JS" ] || [ -f "$NPX_WSL_PATH" ]; then diff --git a/node_modules/npm/bin/npx-cli.js b/node_modules/npm/bin/npx-cli.js index 75090ae..17d96fb 100755 --- a/node_modules/npm/bin/npx-cli.js +++ b/node_modules/npm/bin/npx-cli.js @@ -24,7 +24,7 @@ const removed = new Set([ ...removedOpts, ]) -const { definitions, shorthands } = require('../lib/utils/config/index.js') +const { definitions, shorthands } = require('@npmcli/config/lib/definitions') const npmSwitches = Object.entries(definitions) .filter(([key, { type }]) => type === Boolean || (Array.isArray(type) && type.includes(Boolean))) diff --git a/node_modules/npm/bin/npx.ps1 b/node_modules/npm/bin/npx.ps1 new file mode 100644 index 0000000..437e2a7 --- /dev/null +++ b/node_modules/npm/bin/npx.ps1 @@ -0,0 +1,35 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 + +$nodeexe = "node$exe" +$nodebin = $(Get-Command $nodeexe -ErrorAction SilentlyContinue -ErrorVariable F).Source +if ($nodebin -eq $null) { + Write-Host "$nodeexe not found." + exit 1 +} +$nodedir = $(New-Object -ComObject Scripting.FileSystemObject).GetFile("$nodebin").ParentFolder.Path + +$npmclijs="$nodedir/node_modules/npm/bin/npm-cli.js" +$npmprefix=(& $nodeexe $npmclijs prefix -g) +if ($LASTEXITCODE -ne 0) { + Write-Host "Could not determine Node.js install directory" + exit 1 +} +$npmprefixclijs="$npmprefix/node_modules/npm/bin/npx-cli.js" + +# Support pipeline input +if ($MyInvocation.ExpectingInput) { + $input | & $nodeexe $npmprefixclijs $args +} else { + & $nodeexe $npmprefixclijs $args +} +$ret=$LASTEXITCODE +exit $ret diff --git a/node_modules/npm/docs/content/commands/npm-install-test.md b/node_modules/npm/docs/content/commands/npm-install-test.md index a2136fd..587a0a1 100644 --- a/node_modules/npm/docs/content/commands/npm-install-test.md +++ b/node_modules/npm/docs/content/commands/npm-install-test.md @@ -160,6 +160,22 @@ will also prevent _writing_ `package-lock.json` if `save` is true. +#### `package-lock-only` + +* Default: false +* Type: Boolean + +If set to true, the current operation will only use the `package-lock.json`, +ignoring `node_modules`. + +For `update` this means only the `package-lock.json` will be updated, +instead of checking `node_modules` and downloading dependencies. + +For `list` this means the output will be based on the tree described by the +`package-lock.json`, rather than the contents of `node_modules`. + + + #### `foreground-scripts` * Default: false diff --git a/node_modules/npm/docs/content/commands/npm-install.md b/node_modules/npm/docs/content/commands/npm-install.md index 912a48b..9abf054 100644 --- a/node_modules/npm/docs/content/commands/npm-install.md +++ b/node_modules/npm/docs/content/commands/npm-install.md @@ -550,6 +550,22 @@ will also prevent _writing_ `package-lock.json` if `save` is true. +#### `package-lock-only` + +* Default: false +* Type: Boolean + +If set to true, the current operation will only use the `package-lock.json`, +ignoring `node_modules`. + +For `update` this means only the `package-lock.json` will be updated, +instead of checking `node_modules` and downloading dependencies. 
+ +For `list` this means the output will be based on the tree described by the +`package-lock.json`, rather than the contents of `node_modules`. + + + #### `foreground-scripts` * Default: false diff --git a/node_modules/npm/docs/content/commands/npm-ls.md b/node_modules/npm/docs/content/commands/npm-ls.md index 3091ebf..9f9e074 100644 --- a/node_modules/npm/docs/content/commands/npm-ls.md +++ b/node_modules/npm/docs/content/commands/npm-ls.md @@ -27,7 +27,7 @@ packages will *also* show the paths to the specified packages. For example, running `npm ls promzard` in npm's source tree will show: ```bash -npm@9.7.1 /path/to/npm +npm@9.8.0 /path/to/npm └─┬ init-package-json@0.0.4 └── promzard@0.1.5 ``` diff --git a/node_modules/npm/docs/content/commands/npm-pkg.md b/node_modules/npm/docs/content/commands/npm-pkg.md index 1df2a8d..79f2e96 100644 --- a/node_modules/npm/docs/content/commands/npm-pkg.md +++ b/node_modules/npm/docs/content/commands/npm-pkg.md @@ -12,6 +12,7 @@ npm pkg get [ [ ...]] npm pkg delete [ ...] npm pkg set [[].= ...] npm pkg set [[].= ...] +npm pkg fix ``` ### Description @@ -141,6 +142,13 @@ Returned values are always in **json** format. npm pkg delete scripts.build ``` +* `npm pkg fix` + + Auto corrects common errors in your `package.json`. npm already + does this during `publish`, which leads to subtle (mostly harmless) + differences between the contents of your `package.json` file and the + manifest that npm uses during installation. + ### Workspaces support You can set/get/delete items across your configured workspaces by using the diff --git a/node_modules/npm/docs/content/commands/npm.md b/node_modules/npm/docs/content/commands/npm.md index 4cd80b8..bf73b46 100644 --- a/node_modules/npm/docs/content/commands/npm.md +++ b/node_modules/npm/docs/content/commands/npm.md @@ -14,7 +14,7 @@ Note: This command is unaware of workspaces. ### Version -9.7.1 +9.8.0 ### Description diff --git a/node_modules/npm/docs/content/configuring-npm/package-json.md b/node_modules/npm/docs/content/configuring-npm/package-json.md index 7334c5b..219296f 100644 --- a/node_modules/npm/docs/content/configuring-npm/package-json.md +++ b/node_modules/npm/docs/content/configuring-npm/package-json.md @@ -279,9 +279,6 @@ it will. The `.npmignore` file works just like a `.gitignore`. If there is a `.gitignore` file, and `.npmignore` is missing, `.gitignore`'s contents will be used instead. -Files included with the "package.json#files" field _cannot_ be excluded -through `.npmignore` or `.gitignore`. - Certain files are always included, regardless of settings: * `package.json` diff --git a/node_modules/npm/docs/output/commands/npm-install-test.html b/node_modules/npm/docs/output/commands/npm-install-test.html index 5c0fb7c..d707d68 100644 --- a/node_modules/npm/docs/output/commands/npm-install-test.html +++ b/node_modules/npm/docs/output/commands/npm-install-test.html @@ -142,7 +142,7 @@

npm-install-test

Table of contents

- +

Synopsis

@@ -259,6 +259,17 @@

package-lock

If set to false, then ignore package-lock.json files when installing. This will also prevent writing package-lock.json if save is true.
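
As a quick sketch (assuming npm's usual --no-&lt;flag&gt; negation for boolean configs), ignoring the lockfile for a single run looks like:

npm install --no-package-lock  # skip reading and writing package-lock.json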

+

package-lock-only

+
  • Default: false
  • Type: Boolean

If set to true, the current operation will only use the package-lock.json, ignoring node_modules.

+

For update this means only the package-lock.json will be updated, instead of checking node_modules and downloading dependencies.

+

For list this means the output will be based on the tree described by the package-lock.json, rather than the contents of node_modules.
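
A short sketch of how the flag described above might be exercised, following the update and list behaviors it documents:

npm update --package-lock-only  # refresh only package-lock.json, leaving node_modules untouched
npm ls --package-lock-only      # report the tree described by package-lock.json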

foreground-scripts

If set to false, then ignore package-lock.json files when installing. This will also prevent writing package-lock.json if save is true.

+

package-lock-only

+
  • Default: false
  • Type: Boolean

If set to true, the current operation will only use the package-lock.json, ignoring node_modules.

+

For update this means only the package-lock.json will be updated, instead of checking node_modules and downloading dependencies.

+

For list this means the output will be based on the tree described by the package-lock.json, rather than the contents of node_modules.

foreground-scripts

  • Default: false
diff --git a/node_modules/npm/docs/output/commands/npm-ls.html b/node_modules/npm/docs/output/commands/npm-ls.html index 36f582e..85cc2d7 100644 --- a/node_modules/npm/docs/output/commands/npm-ls.html +++ b/node_modules/npm/docs/output/commands/npm-ls.html @@ -160,7 +160,7 @@

    Description

    the results to only the paths to the packages named. Note that nested packages will also show the paths to the specified packages. For example, running npm ls promzard in npm's source tree will show:

    -
    npm@9.7.1 /path/to/npm
    +
    npm@9.8.0 /path/to/npm
     └─┬ init-package-json@0.0.4
       └── promzard@0.1.5
     
    diff --git a/node_modules/npm/docs/output/commands/npm-pkg.html b/node_modules/npm/docs/output/commands/npm-pkg.html index 2105472..0a0b841 100644 --- a/node_modules/npm/docs/output/commands/npm-pkg.html +++ b/node_modules/npm/docs/output/commands/npm-pkg.html @@ -151,6 +151,7 @@

    Table of contents

npm pkg delete <key> [<key> ...]
npm pkg set [<array>[<index>].<key>=<value> ...]
npm pkg set [<array>[].<key>=<value> ...]
+npm pkg fix

    Description

A command that automates the management of package.json files.
@@ -236,6 +237,13 @@

    Description

    npm pkg delete scripts.build
     
  • npm pkg fix

    Auto corrects common errors in your package.json. npm already does this during publish, which leads to subtle (mostly harmless) differences between the contents of your package.json file and the manifest that npm uses during installation.
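
A minimal usage sketch, assuming the command is run from the package root as described above:

npm pkg fix  # apply the same corrections npm makes to the manifest during publish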

Workspaces support

You can set/get/delete items across your configured workspaces by using the diff --git a/node_modules/npm/docs/output/commands/npm.html b/node_modules/npm/docs/output/commands/npm.html index 5e4ca68..5e34bb2 100644 --- a/node_modules/npm/docs/output/commands/npm.html +++ b/node_modules/npm/docs/output/commands/npm.html @@ -150,7 +150,7 @@

Table of contents

Note: This command is unaware of workspaces.

Version

-

9.7.1

+

9.8.0

Description

npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency conflicts intelligently.
diff --git a/node_modules/npm/docs/output/configuring-npm/package-json.html b/node_modules/npm/docs/output/configuring-npm/package-json.html index 2e1bd00..64e0622 100644 --- a/node_modules/npm/docs/output/configuring-npm/package-json.html +++ b/node_modules/npm/docs/output/configuring-npm/package-json.html @@ -353,8 +353,6 @@

files

it will. The .npmignore file works just like a .gitignore. If there is a .gitignore file, and .npmignore is missing, .gitignore's contents will be used instead.

-

Files included with the "package.json#files" field cannot be excluded through .npmignore or .gitignore.

Certain files are always included, regardless of settings:

  • package.json
  • diff --git a/node_modules/npm/lib/base-command.js b/node_modules/npm/lib/base-command.js index 598964c..e763820 100644 --- a/node_modules/npm/lib/base-command.js +++ b/node_modules/npm/lib/base-command.js @@ -2,7 +2,7 @@ const { relative } = require('path') -const definitions = require('./utils/config/definitions.js') +const { definitions } = require('@npmcli/config/lib/definitions') const getWorkspaces = require('./workspaces/get-workspaces.js') const { aliases: cmdAliases } = require('./utils/cmd-list') diff --git a/node_modules/npm/lib/commands/completion.js b/node_modules/npm/lib/commands/completion.js index 38205ad..59113c5 100644 --- a/node_modules/npm/lib/commands/completion.js +++ b/node_modules/npm/lib/commands/completion.js @@ -34,7 +34,7 @@ const nopt = require('nopt') const { resolve } = require('path') const Npm = require('../npm.js') -const { definitions, shorthands } = require('../utils/config/index.js') +const { definitions, shorthands } = require('@npmcli/config/lib/definitions') const { commands, aliases, deref } = require('../utils/cmd-list.js') const configNames = Object.keys(definitions) const shorthandNames = Object.keys(shorthands) diff --git a/node_modules/npm/lib/commands/config.js b/node_modules/npm/lib/commands/config.js index b49cdd6..8e8358f 100644 --- a/node_modules/npm/lib/commands/config.js +++ b/node_modules/npm/lib/commands/config.js @@ -1,6 +1,3 @@ -// don't expand so that we only assemble the set of defaults when needed -const configDefs = require('../utils/config/index.js') - const { mkdir, readFile, writeFile } = require('fs/promises') const { dirname, resolve } = require('path') const { spawn } = require('child_process') @@ -8,6 +5,7 @@ const { EOL } = require('os') const ini = require('ini') const localeCompare = require('@isaacs/string-locale-compare')('en') const pkgJson = require('@npmcli/package-json') +const { defaults, definitions } = require('@npmcli/config/lib/definitions') const log = require('../utils/log-shim.js') // These are the configs that we can nerf-dart. 
Not all of them currently even @@ -102,7 +100,7 @@ class Config extends BaseCommand { case 'get': case 'delete': case 'rm': - return Object.keys(configDefs.definitions) + return Object.keys(definitions) case 'edit': case 'list': case 'ls': @@ -219,7 +217,7 @@ class Config extends BaseCommand { const data = ( await readFile(file, 'utf8').catch(() => '') ).replace(/\r\n/g, '\n') - const entries = Object.entries(configDefs.defaults) + const entries = Object.entries(defaults) const defData = entries.reduce((str, [key, val]) => { const obj = { [key]: val } const i = ini.stringify(obj) diff --git a/node_modules/npm/lib/commands/doctor.js b/node_modules/npm/lib/commands/doctor.js index 19262e5..96e3437 100644 --- a/node_modules/npm/lib/commands/doctor.js +++ b/node_modules/npm/lib/commands/doctor.js @@ -9,9 +9,7 @@ const semver = require('semver') const { promisify } = require('util') const log = require('../utils/log-shim.js') const ping = require('../utils/ping.js') -const { - registry: { default: defaultRegistry }, -} = require('../utils/config/definitions.js') +const { defaults } = require('@npmcli/config/lib/definitions') const lstat = promisify(fs.lstat) const readdir = promisify(fs.readdir) const access = promisify(fs.access) @@ -364,16 +362,17 @@ class Doctor extends BaseCommand { } async checkNpmRegistry () { - if (this.npm.flatOptions.registry !== defaultRegistry) { - throw `Try \`npm config set registry=${defaultRegistry}\`` + if (this.npm.flatOptions.registry !== defaults.registry) { + throw `Try \`npm config set registry=${defaults.registry}\`` } else { - return `using default registry (${defaultRegistry})` + return `using default registry (${defaults.registry})` } } output (row) { const t = new Table({ - chars: { top: '', + chars: { + top: '', 'top-mid': '', 'top-left': '', 'top-right': '', @@ -387,8 +386,17 @@ class Doctor extends BaseCommand { 'mid-mid': '', right: '', 'right-mid': '', - middle: ' ' }, - style: { 'padding-left': 0, 'padding-right': 0 }, + middle: ' ', + }, + style: { + 'padding-left': 0, + 'padding-right': 0, + // setting border here is not necessary visually since we've already + // zeroed out all the chars above, but without it cli-table3 will wrap + // some of the separator spaces with ansi codes which show up in + // snapshots. 
+ border: 0, + }, colWidths: [this.#checkWidth, 6], }) t.push(row) diff --git a/node_modules/npm/lib/commands/install.js b/node_modules/npm/lib/commands/install.js index 2bfd20a..75f0e2f 100644 --- a/node_modules/npm/lib/commands/install.js +++ b/node_modules/npm/lib/commands/install.js @@ -27,6 +27,7 @@ class Install extends ArboristWorkspaceCmd { 'strict-peer-deps', 'prefer-dedupe', 'package-lock', + 'package-lock-only', 'foreground-scripts', 'ignore-scripts', 'audit', diff --git a/node_modules/npm/lib/commands/pkg.js b/node_modules/npm/lib/commands/pkg.js index 5cdcd20..29bd4e8 100644 --- a/node_modules/npm/lib/commands/pkg.js +++ b/node_modules/npm/lib/commands/pkg.js @@ -11,6 +11,7 @@ class Pkg extends BaseCommand { 'delete [ ...]', 'set [[].= ...]', 'set [[].= ...]', + 'fix', ] static params = [ @@ -45,6 +46,8 @@ class Pkg extends BaseCommand { return this.set(_args) case 'delete': return this.delete(_args) + case 'fix': + return this.fix(_args) default: throw this.usageError() } @@ -136,6 +139,11 @@ class Pkg extends BaseCommand { pkgJson.update(q.toJSON()) await pkgJson.save() } + + async fix () { + const pkgJson = await PackageJson.fix(this.prefix) + await pkgJson.save() + } } module.exports = Pkg diff --git a/node_modules/npm/lib/commands/publish.js b/node_modules/npm/lib/commands/publish.js index 5470727..8d2aa9e 100644 --- a/node_modules/npm/lib/commands/publish.js +++ b/node_modules/npm/lib/commands/publish.js @@ -15,7 +15,7 @@ const { getContents, logTar } = require('../utils/tar.js') // keys that npm supports in .npmrc files and elsewhere. We *may* want to // revisit this at some point, and have a minimal set that's a SemVer-major // change that ought to get a RFC written on it. -const { flatten } = require('../utils/config/index.js') +const { flatten } = require('@npmcli/config/lib/definitions') const pkgJson = require('@npmcli/package-json') const BaseCommand = require('../base-command.js') diff --git a/node_modules/npm/lib/commands/unpublish.js b/node_modules/npm/lib/commands/unpublish.js index 6698529..402f8f3 100644 --- a/node_modules/npm/lib/commands/unpublish.js +++ b/node_modules/npm/lib/commands/unpublish.js @@ -4,7 +4,7 @@ const npa = require('npm-package-arg') const npmFetch = require('npm-registry-fetch') const pkgJson = require('@npmcli/package-json') -const { flatten } = require('../utils/config/index.js') +const { flatten } = require('@npmcli/config/lib/definitions') const getIdentity = require('../utils/get-identity.js') const log = require('../utils/log-shim') const otplease = require('../utils/otplease.js') diff --git a/node_modules/npm/lib/es6/validate-engines.js b/node_modules/npm/lib/es6/validate-engines.js index 0eaa549..cf5315a 100644 --- a/node_modules/npm/lib/es6/validate-engines.js +++ b/node_modules/npm/lib/es6/validate-engines.js @@ -9,7 +9,7 @@ const { engines: { node: engines }, version } = require('../../package.json') const npm = `v${version}` module.exports = (process, getCli) => { - const node = process.version.replace(/-.*$/, '') + const node = process.version /* eslint-disable-next-line max-len */ const unsupportedMessage = `npm ${npm} does not support Node.js ${node}. This version of npm supports the following node versions: \`${engines}\`. 
You can find the latest version at https://nodejs.org/.` diff --git a/node_modules/npm/lib/npm.js b/node_modules/npm/lib/npm.js index f08ef32..1470662 100644 --- a/node_modules/npm/lib/npm.js +++ b/node_modules/npm/lib/npm.js @@ -6,7 +6,7 @@ const fs = require('fs/promises') // Patch the global fs module here at the app level require('graceful-fs').gracefulify(require('fs')) -const { definitions, flatten, shorthands } = require('./utils/config/index.js') +const { definitions, flatten, shorthands } = require('@npmcli/config/lib/definitions') const usage = require('./utils/npm-usage.js') const LogFile = require('./utils/log-file.js') const Timers = require('./utils/timers.js') @@ -194,12 +194,19 @@ class Npm { await this.time('npm:load:configload', () => this.config.load()) - const { Chalk, supportsColor, supportsColorStderr } = await import('chalk') + // get createSupportsColor from chalk directly if this lands + // https://github.com/chalk/chalk/pull/600 + const [{ Chalk }, { createSupportsColor }] = await Promise.all([ + import('chalk'), + import('supports-color'), + ]) this.#noColorChalk = new Chalk({ level: 0 }) - this.#chalk = this.color ? new Chalk({ level: supportsColor.level }) - : this.#noColorChalk - this.#logChalk = this.logColor ? new Chalk({ level: supportsColorStderr.level }) - : this.#noColorChalk + // we get the chalk level based on a null stream meaning chalk will only use + // what it knows about the environment to get color support since we already + // determined in our definitions that we want to show colors. + const level = Math.max(createSupportsColor(null).level, 1) + this.#chalk = this.color ? new Chalk({ level }) : this.#noColorChalk + this.#logChalk = this.logColor ? new Chalk({ level }) : this.#noColorChalk // mkdir this separately since the logs dir can be set to // a different location. 
if this fails, then we don't have diff --git a/node_modules/npm/man/man1/npm-access.1 b/node_modules/npm/man/man1/npm-access.1 index 96534a0..b6266e1 100644 --- a/node_modules/npm/man/man1/npm-access.1 +++ b/node_modules/npm/man/man1/npm-access.1 @@ -1,4 +1,4 @@ -.TH "NPM-ACCESS" "1" "June 2023" "" "" +.TH "NPM-ACCESS" "1" "July 2023" "" "" .SH "NAME" \fBnpm-access\fR - Set access level on published packages .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-adduser.1 b/node_modules/npm/man/man1/npm-adduser.1 index 3a018db..c38b625 100644 --- a/node_modules/npm/man/man1/npm-adduser.1 +++ b/node_modules/npm/man/man1/npm-adduser.1 @@ -1,4 +1,4 @@ -.TH "NPM-ADDUSER" "1" "June 2023" "" "" +.TH "NPM-ADDUSER" "1" "July 2023" "" "" .SH "NAME" \fBnpm-adduser\fR - Add a registry user account .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-audit.1 b/node_modules/npm/man/man1/npm-audit.1 index da9a483..35fb73e 100644 --- a/node_modules/npm/man/man1/npm-audit.1 +++ b/node_modules/npm/man/man1/npm-audit.1 @@ -1,4 +1,4 @@ -.TH "NPM-AUDIT" "1" "June 2023" "" "" +.TH "NPM-AUDIT" "1" "July 2023" "" "" .SH "NAME" \fBnpm-audit\fR - Run a security audit .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-bugs.1 b/node_modules/npm/man/man1/npm-bugs.1 index a5ddd4e..8fac524 100644 --- a/node_modules/npm/man/man1/npm-bugs.1 +++ b/node_modules/npm/man/man1/npm-bugs.1 @@ -1,4 +1,4 @@ -.TH "NPM-BUGS" "1" "June 2023" "" "" +.TH "NPM-BUGS" "1" "July 2023" "" "" .SH "NAME" \fBnpm-bugs\fR - Report bugs for a package in a web browser .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-cache.1 b/node_modules/npm/man/man1/npm-cache.1 index 5669388..f16723a 100644 --- a/node_modules/npm/man/man1/npm-cache.1 +++ b/node_modules/npm/man/man1/npm-cache.1 @@ -1,4 +1,4 @@ -.TH "NPM-CACHE" "1" "June 2023" "" "" +.TH "NPM-CACHE" "1" "July 2023" "" "" .SH "NAME" \fBnpm-cache\fR - Manipulates packages cache .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-ci.1 b/node_modules/npm/man/man1/npm-ci.1 index 05e435e..a8c6416 100644 --- a/node_modules/npm/man/man1/npm-ci.1 +++ b/node_modules/npm/man/man1/npm-ci.1 @@ -1,4 +1,4 @@ -.TH "NPM-CI" "1" "June 2023" "" "" +.TH "NPM-CI" "1" "July 2023" "" "" .SH "NAME" \fBnpm-ci\fR - Clean install a project .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-completion.1 b/node_modules/npm/man/man1/npm-completion.1 index d72df72..fbf5a68 100644 --- a/node_modules/npm/man/man1/npm-completion.1 +++ b/node_modules/npm/man/man1/npm-completion.1 @@ -1,4 +1,4 @@ -.TH "NPM-COMPLETION" "1" "June 2023" "" "" +.TH "NPM-COMPLETION" "1" "July 2023" "" "" .SH "NAME" \fBnpm-completion\fR - Tab Completion for npm .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-config.1 b/node_modules/npm/man/man1/npm-config.1 index 4b19fa0..77fd28d 100644 --- a/node_modules/npm/man/man1/npm-config.1 +++ b/node_modules/npm/man/man1/npm-config.1 @@ -1,4 +1,4 @@ -.TH "NPM-CONFIG" "1" "June 2023" "" "" +.TH "NPM-CONFIG" "1" "July 2023" "" "" .SH "NAME" \fBnpm-config\fR - Manage the npm configuration files .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-dedupe.1 b/node_modules/npm/man/man1/npm-dedupe.1 index 56e57db..bb97e32 100644 --- a/node_modules/npm/man/man1/npm-dedupe.1 +++ b/node_modules/npm/man/man1/npm-dedupe.1 @@ -1,4 +1,4 @@ -.TH "NPM-DEDUPE" "1" "June 2023" "" "" +.TH "NPM-DEDUPE" "1" "July 2023" "" "" .SH "NAME" \fBnpm-dedupe\fR - Reduce duplication in the package tree .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-deprecate.1 
b/node_modules/npm/man/man1/npm-deprecate.1 index 1458a88..36b97e3 100644 --- a/node_modules/npm/man/man1/npm-deprecate.1 +++ b/node_modules/npm/man/man1/npm-deprecate.1 @@ -1,4 +1,4 @@ -.TH "NPM-DEPRECATE" "1" "June 2023" "" "" +.TH "NPM-DEPRECATE" "1" "July 2023" "" "" .SH "NAME" \fBnpm-deprecate\fR - Deprecate a version of a package .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-diff.1 b/node_modules/npm/man/man1/npm-diff.1 index 44df189..99bbc2f 100644 --- a/node_modules/npm/man/man1/npm-diff.1 +++ b/node_modules/npm/man/man1/npm-diff.1 @@ -1,4 +1,4 @@ -.TH "NPM-DIFF" "1" "June 2023" "" "" +.TH "NPM-DIFF" "1" "July 2023" "" "" .SH "NAME" \fBnpm-diff\fR - The registry diff command .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-dist-tag.1 b/node_modules/npm/man/man1/npm-dist-tag.1 index 11e5010..16ebcdb 100644 --- a/node_modules/npm/man/man1/npm-dist-tag.1 +++ b/node_modules/npm/man/man1/npm-dist-tag.1 @@ -1,4 +1,4 @@ -.TH "NPM-DIST-TAG" "1" "June 2023" "" "" +.TH "NPM-DIST-TAG" "1" "July 2023" "" "" .SH "NAME" \fBnpm-dist-tag\fR - Modify package distribution tags .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-docs.1 b/node_modules/npm/man/man1/npm-docs.1 index dd701e9..779b891 100644 --- a/node_modules/npm/man/man1/npm-docs.1 +++ b/node_modules/npm/man/man1/npm-docs.1 @@ -1,4 +1,4 @@ -.TH "NPM-DOCS" "1" "June 2023" "" "" +.TH "NPM-DOCS" "1" "July 2023" "" "" .SH "NAME" \fBnpm-docs\fR - Open documentation for a package in a web browser .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-doctor.1 b/node_modules/npm/man/man1/npm-doctor.1 index b1d36f3..a1eef65 100644 --- a/node_modules/npm/man/man1/npm-doctor.1 +++ b/node_modules/npm/man/man1/npm-doctor.1 @@ -1,4 +1,4 @@ -.TH "NPM-DOCTOR" "1" "June 2023" "" "" +.TH "NPM-DOCTOR" "1" "July 2023" "" "" .SH "NAME" \fBnpm-doctor\fR - Check your npm environment .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-edit.1 b/node_modules/npm/man/man1/npm-edit.1 index 140946f..ee7b76b 100644 --- a/node_modules/npm/man/man1/npm-edit.1 +++ b/node_modules/npm/man/man1/npm-edit.1 @@ -1,4 +1,4 @@ -.TH "NPM-EDIT" "1" "June 2023" "" "" +.TH "NPM-EDIT" "1" "July 2023" "" "" .SH "NAME" \fBnpm-edit\fR - Edit an installed package .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-exec.1 b/node_modules/npm/man/man1/npm-exec.1 index d6efae1..9e6af78 100644 --- a/node_modules/npm/man/man1/npm-exec.1 +++ b/node_modules/npm/man/man1/npm-exec.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXEC" "1" "June 2023" "" "" +.TH "NPM-EXEC" "1" "July 2023" "" "" .SH "NAME" \fBnpm-exec\fR - Run a command from a local or remote npm package .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-explain.1 b/node_modules/npm/man/man1/npm-explain.1 index 1a32c14..9985e3c 100644 --- a/node_modules/npm/man/man1/npm-explain.1 +++ b/node_modules/npm/man/man1/npm-explain.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXPLAIN" "1" "June 2023" "" "" +.TH "NPM-EXPLAIN" "1" "July 2023" "" "" .SH "NAME" \fBnpm-explain\fR - Explain installed packages .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-explore.1 b/node_modules/npm/man/man1/npm-explore.1 index 2e9549c..ee3ee6b 100644 --- a/node_modules/npm/man/man1/npm-explore.1 +++ b/node_modules/npm/man/man1/npm-explore.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXPLORE" "1" "June 2023" "" "" +.TH "NPM-EXPLORE" "1" "July 2023" "" "" .SH "NAME" \fBnpm-explore\fR - Browse an installed package .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-find-dupes.1 b/node_modules/npm/man/man1/npm-find-dupes.1 index 13d9d3a..9d37363 100644 
--- a/node_modules/npm/man/man1/npm-find-dupes.1 +++ b/node_modules/npm/man/man1/npm-find-dupes.1 @@ -1,4 +1,4 @@ -.TH "NPM-FIND-DUPES" "1" "June 2023" "" "" +.TH "NPM-FIND-DUPES" "1" "July 2023" "" "" .SH "NAME" \fBnpm-find-dupes\fR - Find duplication in the package tree .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-fund.1 b/node_modules/npm/man/man1/npm-fund.1 index 074cd07..794d043 100644 --- a/node_modules/npm/man/man1/npm-fund.1 +++ b/node_modules/npm/man/man1/npm-fund.1 @@ -1,4 +1,4 @@ -.TH "NPM-FUND" "1" "June 2023" "" "" +.TH "NPM-FUND" "1" "July 2023" "" "" .SH "NAME" \fBnpm-fund\fR - Retrieve funding information .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-help-search.1 b/node_modules/npm/man/man1/npm-help-search.1 index 1de2840..0f85ec2 100644 --- a/node_modules/npm/man/man1/npm-help-search.1 +++ b/node_modules/npm/man/man1/npm-help-search.1 @@ -1,4 +1,4 @@ -.TH "NPM-HELP-SEARCH" "1" "June 2023" "" "" +.TH "NPM-HELP-SEARCH" "1" "July 2023" "" "" .SH "NAME" \fBnpm-help-search\fR - Search npm help documentation .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-help.1 b/node_modules/npm/man/man1/npm-help.1 index 340db54..9226fac 100644 --- a/node_modules/npm/man/man1/npm-help.1 +++ b/node_modules/npm/man/man1/npm-help.1 @@ -1,4 +1,4 @@ -.TH "NPM-HELP" "1" "June 2023" "" "" +.TH "NPM-HELP" "1" "July 2023" "" "" .SH "NAME" \fBnpm-help\fR - Get help on npm .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-hook.1 b/node_modules/npm/man/man1/npm-hook.1 index 4dfab8c..df6ff9f 100644 --- a/node_modules/npm/man/man1/npm-hook.1 +++ b/node_modules/npm/man/man1/npm-hook.1 @@ -1,4 +1,4 @@ -.TH "NPM-HOOK" "1" "June 2023" "" "" +.TH "NPM-HOOK" "1" "July 2023" "" "" .SH "NAME" \fBnpm-hook\fR - Manage registry hooks .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-init.1 b/node_modules/npm/man/man1/npm-init.1 index d74c57e..7a6722b 100644 --- a/node_modules/npm/man/man1/npm-init.1 +++ b/node_modules/npm/man/man1/npm-init.1 @@ -1,4 +1,4 @@ -.TH "NPM-INIT" "1" "June 2023" "" "" +.TH "NPM-INIT" "1" "July 2023" "" "" .SH "NAME" \fBnpm-init\fR - Create a package.json file .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-install-ci-test.1 b/node_modules/npm/man/man1/npm-install-ci-test.1 index 0aac5f1..306c5e3 100644 --- a/node_modules/npm/man/man1/npm-install-ci-test.1 +++ b/node_modules/npm/man/man1/npm-install-ci-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL-CI-TEST" "1" "June 2023" "" "" +.TH "NPM-INSTALL-CI-TEST" "1" "July 2023" "" "" .SH "NAME" \fBnpm-install-ci-test\fR - Install a project with a clean slate and run tests .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-install-test.1 b/node_modules/npm/man/man1/npm-install-test.1 index b660133..47dfcea 100644 --- a/node_modules/npm/man/man1/npm-install-test.1 +++ b/node_modules/npm/man/man1/npm-install-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL-TEST" "1" "June 2023" "" "" +.TH "NPM-INSTALL-TEST" "1" "July 2023" "" "" .SH "NAME" \fBnpm-install-test\fR - Install package(s) and run tests .SS "Synopsis" @@ -141,6 +141,20 @@ Type: Boolean .P If set to false, then ignore \fBpackage-lock.json\fR files when installing. This will also prevent \fIwriting\fR \fBpackage-lock.json\fR if \fBsave\fR is true. +.SS "\fBpackage-lock-only\fR" +.RS 0 +.IP \(bu 4 +Default: false +.IP \(bu 4 +Type: Boolean +.RE 0 + +.P +If set to true, the current operation will only use the \fBpackage-lock.json\fR, ignoring \fBnode_modules\fR. 
+.P +For \fBupdate\fR this means only the \fBpackage-lock.json\fR will be updated, instead of checking \fBnode_modules\fR and downloading dependencies. +.P +For \fBlist\fR this means the output will be based on the tree described by the \fBpackage-lock.json\fR, rather than the contents of \fBnode_modules\fR. .SS "\fBforeground-scripts\fR" .RS 0 .IP \(bu 4 diff --git a/node_modules/npm/man/man1/npm-install.1 b/node_modules/npm/man/man1/npm-install.1 index e8a019a..1e93c6d 100644 --- a/node_modules/npm/man/man1/npm-install.1 +++ b/node_modules/npm/man/man1/npm-install.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL" "1" "June 2023" "" "" +.TH "NPM-INSTALL" "1" "July 2023" "" "" .SH "NAME" \fBnpm-install\fR - Install a package .SS "Synopsis" @@ -503,6 +503,20 @@ Type: Boolean .P If set to false, then ignore \fBpackage-lock.json\fR files when installing. This will also prevent \fIwriting\fR \fBpackage-lock.json\fR if \fBsave\fR is true. +.SS "\fBpackage-lock-only\fR" +.RS 0 +.IP \(bu 4 +Default: false +.IP \(bu 4 +Type: Boolean +.RE 0 + +.P +If set to true, the current operation will only use the \fBpackage-lock.json\fR, ignoring \fBnode_modules\fR. +.P +For \fBupdate\fR this means only the \fBpackage-lock.json\fR will be updated, instead of checking \fBnode_modules\fR and downloading dependencies. +.P +For \fBlist\fR this means the output will be based on the tree described by the \fBpackage-lock.json\fR, rather than the contents of \fBnode_modules\fR. .SS "\fBforeground-scripts\fR" .RS 0 .IP \(bu 4 diff --git a/node_modules/npm/man/man1/npm-link.1 b/node_modules/npm/man/man1/npm-link.1 index 38df239..9494902 100644 --- a/node_modules/npm/man/man1/npm-link.1 +++ b/node_modules/npm/man/man1/npm-link.1 @@ -1,4 +1,4 @@ -.TH "NPM-LINK" "1" "June 2023" "" "" +.TH "NPM-LINK" "1" "July 2023" "" "" .SH "NAME" \fBnpm-link\fR - Symlink a package folder .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-login.1 b/node_modules/npm/man/man1/npm-login.1 index f593dbb..fb07b49 100644 --- a/node_modules/npm/man/man1/npm-login.1 +++ b/node_modules/npm/man/man1/npm-login.1 @@ -1,4 +1,4 @@ -.TH "NPM-LOGIN" "1" "June 2023" "" "" +.TH "NPM-LOGIN" "1" "July 2023" "" "" .SH "NAME" \fBnpm-login\fR - Login to a registry user account .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-logout.1 b/node_modules/npm/man/man1/npm-logout.1 index 1f3fbb5..9ee817a 100644 --- a/node_modules/npm/man/man1/npm-logout.1 +++ b/node_modules/npm/man/man1/npm-logout.1 @@ -1,4 +1,4 @@ -.TH "NPM-LOGOUT" "1" "June 2023" "" "" +.TH "NPM-LOGOUT" "1" "July 2023" "" "" .SH "NAME" \fBnpm-logout\fR - Log out of the registry .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-ls.1 b/node_modules/npm/man/man1/npm-ls.1 index 4696fc2..18be3b1 100644 --- a/node_modules/npm/man/man1/npm-ls.1 +++ b/node_modules/npm/man/man1/npm-ls.1 @@ -1,4 +1,4 @@ -.TH "NPM-LS" "1" "June 2023" "" "" +.TH "NPM-LS" "1" "July 2023" "" "" .SH "NAME" \fBnpm-ls\fR - List installed packages .SS "Synopsis" @@ -20,7 +20,7 @@ Positional arguments are \fBname@version-range\fR identifiers, which will limit .P .RS 2 .nf -npm@9.7.1 /path/to/npm +npm@9.8.0 /path/to/npm └─┬ init-package-json@0.0.4 └── promzard@0.1.5 .fi diff --git a/node_modules/npm/man/man1/npm-org.1 b/node_modules/npm/man/man1/npm-org.1 index 0d1d445..f458489 100644 --- a/node_modules/npm/man/man1/npm-org.1 +++ b/node_modules/npm/man/man1/npm-org.1 @@ -1,4 +1,4 @@ -.TH "NPM-ORG" "1" "June 2023" "" "" +.TH "NPM-ORG" "1" "July 2023" "" "" .SH "NAME" \fBnpm-org\fR - Manage orgs .SS "Synopsis" diff --git 
a/node_modules/npm/man/man1/npm-outdated.1 b/node_modules/npm/man/man1/npm-outdated.1 index b1665a7..0c5d218 100644 --- a/node_modules/npm/man/man1/npm-outdated.1 +++ b/node_modules/npm/man/man1/npm-outdated.1 @@ -1,4 +1,4 @@ -.TH "NPM-OUTDATED" "1" "June 2023" "" "" +.TH "NPM-OUTDATED" "1" "July 2023" "" "" .SH "NAME" \fBnpm-outdated\fR - Check for outdated packages .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-owner.1 b/node_modules/npm/man/man1/npm-owner.1 index 16a6feb..b2d1940 100644 --- a/node_modules/npm/man/man1/npm-owner.1 +++ b/node_modules/npm/man/man1/npm-owner.1 @@ -1,4 +1,4 @@ -.TH "NPM-OWNER" "1" "June 2023" "" "" +.TH "NPM-OWNER" "1" "July 2023" "" "" .SH "NAME" \fBnpm-owner\fR - Manage package owners .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-pack.1 b/node_modules/npm/man/man1/npm-pack.1 index 7803db5..38869ef 100644 --- a/node_modules/npm/man/man1/npm-pack.1 +++ b/node_modules/npm/man/man1/npm-pack.1 @@ -1,4 +1,4 @@ -.TH "NPM-PACK" "1" "June 2023" "" "" +.TH "NPM-PACK" "1" "July 2023" "" "" .SH "NAME" \fBnpm-pack\fR - Create a tarball from a package .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-ping.1 b/node_modules/npm/man/man1/npm-ping.1 index 568a752..fdbc131 100644 --- a/node_modules/npm/man/man1/npm-ping.1 +++ b/node_modules/npm/man/man1/npm-ping.1 @@ -1,4 +1,4 @@ -.TH "NPM-PING" "1" "June 2023" "" "" +.TH "NPM-PING" "1" "July 2023" "" "" .SH "NAME" \fBnpm-ping\fR - Ping npm registry .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-pkg.1 b/node_modules/npm/man/man1/npm-pkg.1 index ec10aec..806a5ae 100644 --- a/node_modules/npm/man/man1/npm-pkg.1 +++ b/node_modules/npm/man/man1/npm-pkg.1 @@ -1,4 +1,4 @@ -.TH "NPM-PKG" "1" "June 2023" "" "" +.TH "NPM-PKG" "1" "July 2023" "" "" .SH "NAME" \fBnpm-pkg\fR - Manages your package.json .SS "Synopsis" @@ -10,6 +10,7 @@ npm pkg get \[lB] \[lB] ...\[rB]\[rB] npm pkg delete \[lB] ...\[rB] npm pkg set \[lB]\[lB]\[rB].= ...\[rB] npm pkg set \[lB]\[lB]\[rB].= ...\[rB] +npm pkg fix .fi .RE .SS "Description" @@ -138,6 +139,10 @@ The same syntax used to set values from your package can also be used to remove npm pkg delete scripts.build .fi .RE +.IP \(bu 4 +\fBnpm pkg fix\fR +.P +Auto corrects common errors in your \fBpackage.json\fR. npm already does this during \fBpublish\fR, which leads to subtle (mostly harmless) differences between the contents of your \fBpackage.json\fR file and the manifest that npm uses during installation. 
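The `npm pkg fix` behavior described above is backed by the new `fix()` entry point this same diff adds to `@npmcli/package-json` (see the library changes further down). A minimal sketch of driving those fix steps programmatically; the `./my-pkg` path and the surrounding wiring are illustrative assumptions, while the `fix()`/`save()` calls and the `changes` option come straight from the code in this diff:

// Sketch: apply the same corrections as `npm pkg fix` through the new
// @npmcli/package-json API. The `changes` array is threaded through to
// normalize(), which pushes a human-readable note for every fix it makes.
const PackageJson = require('@npmcli/package-json')

async function fixManifest (dir) {
  const changes = []
  const pkg = await PackageJson.fix(dir, { changes })
  await pkg.save() // save() only writes if the content actually changed
  return changes
}

// hypothetical target directory
fixManifest('./my-pkg').then(notes => notes.forEach(n => console.log(n)))
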
.RE 0 .SS "Workspaces support" diff --git a/node_modules/npm/man/man1/npm-prefix.1 b/node_modules/npm/man/man1/npm-prefix.1 index 9b24982..764e9b9 100644 --- a/node_modules/npm/man/man1/npm-prefix.1 +++ b/node_modules/npm/man/man1/npm-prefix.1 @@ -1,4 +1,4 @@ -.TH "NPM-PREFIX" "1" "June 2023" "" "" +.TH "NPM-PREFIX" "1" "July 2023" "" "" .SH "NAME" \fBnpm-prefix\fR - Display prefix .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-profile.1 b/node_modules/npm/man/man1/npm-profile.1 index cc5daf3..47c7d2e 100644 --- a/node_modules/npm/man/man1/npm-profile.1 +++ b/node_modules/npm/man/man1/npm-profile.1 @@ -1,4 +1,4 @@ -.TH "NPM-PROFILE" "1" "June 2023" "" "" +.TH "NPM-PROFILE" "1" "July 2023" "" "" .SH "NAME" \fBnpm-profile\fR - Change settings on your registry profile .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-prune.1 b/node_modules/npm/man/man1/npm-prune.1 index d724cc5..fd4492f 100644 --- a/node_modules/npm/man/man1/npm-prune.1 +++ b/node_modules/npm/man/man1/npm-prune.1 @@ -1,4 +1,4 @@ -.TH "NPM-PRUNE" "1" "June 2023" "" "" +.TH "NPM-PRUNE" "1" "July 2023" "" "" .SH "NAME" \fBnpm-prune\fR - Remove extraneous packages .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-publish.1 b/node_modules/npm/man/man1/npm-publish.1 index bb31539..888977f 100644 --- a/node_modules/npm/man/man1/npm-publish.1 +++ b/node_modules/npm/man/man1/npm-publish.1 @@ -1,4 +1,4 @@ -.TH "NPM-PUBLISH" "1" "June 2023" "" "" +.TH "NPM-PUBLISH" "1" "July 2023" "" "" .SH "NAME" \fBnpm-publish\fR - Publish a package .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-query.1 b/node_modules/npm/man/man1/npm-query.1 index 84dcbb9..e8bda25 100644 --- a/node_modules/npm/man/man1/npm-query.1 +++ b/node_modules/npm/man/man1/npm-query.1 @@ -1,4 +1,4 @@ -.TH "NPM-QUERY" "1" "June 2023" "" "" +.TH "NPM-QUERY" "1" "July 2023" "" "" .SH "NAME" \fBnpm-query\fR - Dependency selector query .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-rebuild.1 b/node_modules/npm/man/man1/npm-rebuild.1 index e0f8c8e..4d7644f 100644 --- a/node_modules/npm/man/man1/npm-rebuild.1 +++ b/node_modules/npm/man/man1/npm-rebuild.1 @@ -1,4 +1,4 @@ -.TH "NPM-REBUILD" "1" "June 2023" "" "" +.TH "NPM-REBUILD" "1" "July 2023" "" "" .SH "NAME" \fBnpm-rebuild\fR - Rebuild a package .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-repo.1 b/node_modules/npm/man/man1/npm-repo.1 index 91d8176..233ae1c 100644 --- a/node_modules/npm/man/man1/npm-repo.1 +++ b/node_modules/npm/man/man1/npm-repo.1 @@ -1,4 +1,4 @@ -.TH "NPM-REPO" "1" "June 2023" "" "" +.TH "NPM-REPO" "1" "July 2023" "" "" .SH "NAME" \fBnpm-repo\fR - Open package repository page in the browser .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-restart.1 b/node_modules/npm/man/man1/npm-restart.1 index 0caa8f7..5df4da8 100644 --- a/node_modules/npm/man/man1/npm-restart.1 +++ b/node_modules/npm/man/man1/npm-restart.1 @@ -1,4 +1,4 @@ -.TH "NPM-RESTART" "1" "June 2023" "" "" +.TH "NPM-RESTART" "1" "July 2023" "" "" .SH "NAME" \fBnpm-restart\fR - Restart a package .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-root.1 b/node_modules/npm/man/man1/npm-root.1 index e640d17..9d7f65b 100644 --- a/node_modules/npm/man/man1/npm-root.1 +++ b/node_modules/npm/man/man1/npm-root.1 @@ -1,4 +1,4 @@ -.TH "NPM-ROOT" "1" "June 2023" "" "" +.TH "NPM-ROOT" "1" "July 2023" "" "" .SH "NAME" \fBnpm-root\fR - Display npm root .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-run-script.1 b/node_modules/npm/man/man1/npm-run-script.1 index cdd5e4a..2b458fc 
100644 --- a/node_modules/npm/man/man1/npm-run-script.1 +++ b/node_modules/npm/man/man1/npm-run-script.1 @@ -1,4 +1,4 @@ -.TH "NPM-RUN-SCRIPT" "1" "June 2023" "" "" +.TH "NPM-RUN-SCRIPT" "1" "July 2023" "" "" .SH "NAME" \fBnpm-run-script\fR - Run arbitrary package scripts .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-search.1 b/node_modules/npm/man/man1/npm-search.1 index f26ddca..30a9e7c 100644 --- a/node_modules/npm/man/man1/npm-search.1 +++ b/node_modules/npm/man/man1/npm-search.1 @@ -1,4 +1,4 @@ -.TH "NPM-SEARCH" "1" "June 2023" "" "" +.TH "NPM-SEARCH" "1" "July 2023" "" "" .SH "NAME" \fBnpm-search\fR - Search for packages .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-shrinkwrap.1 b/node_modules/npm/man/man1/npm-shrinkwrap.1 index 0d64164..932c73f 100644 --- a/node_modules/npm/man/man1/npm-shrinkwrap.1 +++ b/node_modules/npm/man/man1/npm-shrinkwrap.1 @@ -1,4 +1,4 @@ -.TH "NPM-SHRINKWRAP" "1" "June 2023" "" "" +.TH "NPM-SHRINKWRAP" "1" "July 2023" "" "" .SH "NAME" \fBnpm-shrinkwrap\fR - Lock down dependency versions for publication .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-star.1 b/node_modules/npm/man/man1/npm-star.1 index 2bbd7b3..83bec12 100644 --- a/node_modules/npm/man/man1/npm-star.1 +++ b/node_modules/npm/man/man1/npm-star.1 @@ -1,4 +1,4 @@ -.TH "NPM-STAR" "1" "June 2023" "" "" +.TH "NPM-STAR" "1" "July 2023" "" "" .SH "NAME" \fBnpm-star\fR - Mark your favorite packages .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-stars.1 b/node_modules/npm/man/man1/npm-stars.1 index 3f8721a..7651506 100644 --- a/node_modules/npm/man/man1/npm-stars.1 +++ b/node_modules/npm/man/man1/npm-stars.1 @@ -1,4 +1,4 @@ -.TH "NPM-STARS" "1" "June 2023" "" "" +.TH "NPM-STARS" "1" "July 2023" "" "" .SH "NAME" \fBnpm-stars\fR - View packages marked as favorites .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-start.1 b/node_modules/npm/man/man1/npm-start.1 index 27c0956..6d3fa76 100644 --- a/node_modules/npm/man/man1/npm-start.1 +++ b/node_modules/npm/man/man1/npm-start.1 @@ -1,4 +1,4 @@ -.TH "NPM-START" "1" "June 2023" "" "" +.TH "NPM-START" "1" "July 2023" "" "" .SH "NAME" \fBnpm-start\fR - Start a package .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-stop.1 b/node_modules/npm/man/man1/npm-stop.1 index 14742a9..54611e3 100644 --- a/node_modules/npm/man/man1/npm-stop.1 +++ b/node_modules/npm/man/man1/npm-stop.1 @@ -1,4 +1,4 @@ -.TH "NPM-STOP" "1" "June 2023" "" "" +.TH "NPM-STOP" "1" "July 2023" "" "" .SH "NAME" \fBnpm-stop\fR - Stop a package .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-team.1 b/node_modules/npm/man/man1/npm-team.1 index abed5a5..7b806f4 100644 --- a/node_modules/npm/man/man1/npm-team.1 +++ b/node_modules/npm/man/man1/npm-team.1 @@ -1,4 +1,4 @@ -.TH "NPM-TEAM" "1" "June 2023" "" "" +.TH "NPM-TEAM" "1" "July 2023" "" "" .SH "NAME" \fBnpm-team\fR - Manage organization teams and team memberships .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-test.1 b/node_modules/npm/man/man1/npm-test.1 index 7f4945b..5e02ed4 100644 --- a/node_modules/npm/man/man1/npm-test.1 +++ b/node_modules/npm/man/man1/npm-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-TEST" "1" "June 2023" "" "" +.TH "NPM-TEST" "1" "July 2023" "" "" .SH "NAME" \fBnpm-test\fR - Test a package .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-token.1 b/node_modules/npm/man/man1/npm-token.1 index a9aeb2f..242c82f 100644 --- a/node_modules/npm/man/man1/npm-token.1 +++ b/node_modules/npm/man/man1/npm-token.1 @@ -1,4 +1,4 @@ -.TH "NPM-TOKEN" "1" "June 
2023" "" "" +.TH "NPM-TOKEN" "1" "July 2023" "" "" .SH "NAME" \fBnpm-token\fR - Manage your authentication tokens .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-uninstall.1 b/node_modules/npm/man/man1/npm-uninstall.1 index b35a578..d89488f 100644 --- a/node_modules/npm/man/man1/npm-uninstall.1 +++ b/node_modules/npm/man/man1/npm-uninstall.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNINSTALL" "1" "June 2023" "" "" +.TH "NPM-UNINSTALL" "1" "July 2023" "" "" .SH "NAME" \fBnpm-uninstall\fR - Remove a package .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-unpublish.1 b/node_modules/npm/man/man1/npm-unpublish.1 index 151d24b..faa9bd2 100644 --- a/node_modules/npm/man/man1/npm-unpublish.1 +++ b/node_modules/npm/man/man1/npm-unpublish.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNPUBLISH" "1" "June 2023" "" "" +.TH "NPM-UNPUBLISH" "1" "July 2023" "" "" .SH "NAME" \fBnpm-unpublish\fR - Remove a package from the registry .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-unstar.1 b/node_modules/npm/man/man1/npm-unstar.1 index 802ccf6..157e7f5 100644 --- a/node_modules/npm/man/man1/npm-unstar.1 +++ b/node_modules/npm/man/man1/npm-unstar.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNSTAR" "1" "June 2023" "" "" +.TH "NPM-UNSTAR" "1" "July 2023" "" "" .SH "NAME" \fBnpm-unstar\fR - Remove an item from your favorite packages .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-update.1 b/node_modules/npm/man/man1/npm-update.1 index 787e2ee..c72b717 100644 --- a/node_modules/npm/man/man1/npm-update.1 +++ b/node_modules/npm/man/man1/npm-update.1 @@ -1,4 +1,4 @@ -.TH "NPM-UPDATE" "1" "June 2023" "" "" +.TH "NPM-UPDATE" "1" "July 2023" "" "" .SH "NAME" \fBnpm-update\fR - Update packages .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-version.1 b/node_modules/npm/man/man1/npm-version.1 index 3a88228..4827272 100644 --- a/node_modules/npm/man/man1/npm-version.1 +++ b/node_modules/npm/man/man1/npm-version.1 @@ -1,4 +1,4 @@ -.TH "NPM-VERSION" "1" "June 2023" "" "" +.TH "NPM-VERSION" "1" "July 2023" "" "" .SH "NAME" \fBnpm-version\fR - Bump a package version .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-view.1 b/node_modules/npm/man/man1/npm-view.1 index 50b8dcb..ea6fdfe 100644 --- a/node_modules/npm/man/man1/npm-view.1 +++ b/node_modules/npm/man/man1/npm-view.1 @@ -1,4 +1,4 @@ -.TH "NPM-VIEW" "1" "June 2023" "" "" +.TH "NPM-VIEW" "1" "July 2023" "" "" .SH "NAME" \fBnpm-view\fR - View registry info .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm-whoami.1 b/node_modules/npm/man/man1/npm-whoami.1 index 7bc53ca..799d85f 100644 --- a/node_modules/npm/man/man1/npm-whoami.1 +++ b/node_modules/npm/man/man1/npm-whoami.1 @@ -1,4 +1,4 @@ -.TH "NPM-WHOAMI" "1" "June 2023" "" "" +.TH "NPM-WHOAMI" "1" "July 2023" "" "" .SH "NAME" \fBnpm-whoami\fR - Display npm username .SS "Synopsis" diff --git a/node_modules/npm/man/man1/npm.1 b/node_modules/npm/man/man1/npm.1 index 7e6e660..114d4de 100644 --- a/node_modules/npm/man/man1/npm.1 +++ b/node_modules/npm/man/man1/npm.1 @@ -1,4 +1,4 @@ -.TH "NPM" "1" "June 2023" "" "" +.TH "NPM" "1" "July 2023" "" "" .SH "NAME" \fBnpm\fR - javascript package manager .SS "Synopsis" @@ -12,7 +12,7 @@ npm Note: This command is unaware of workspaces. .SS "Version" .P -9.7.1 +9.8.0 .SS "Description" .P npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency conflicts intelligently. 
diff --git a/node_modules/npm/man/man1/npx.1 b/node_modules/npm/man/man1/npx.1 index 6254530..f1c9b4c 100644 --- a/node_modules/npm/man/man1/npx.1 +++ b/node_modules/npm/man/man1/npx.1 @@ -1,4 +1,4 @@ -.TH "NPX" "1" "June 2023" "" "" +.TH "NPX" "1" "July 2023" "" "" .SH "NAME" \fBnpx\fR - Run a command from a local or remote npm package .SS "Synopsis" diff --git a/node_modules/npm/man/man5/folders.5 b/node_modules/npm/man/man5/folders.5 index c4ddf51..3661e0b 100644 --- a/node_modules/npm/man/man5/folders.5 +++ b/node_modules/npm/man/man5/folders.5 @@ -1,4 +1,4 @@ -.TH "FOLDERS" "5" "June 2023" "" "" +.TH "FOLDERS" "5" "July 2023" "" "" .SH "NAME" \fBfolders\fR - Folder Structures Used by npm .SS "Description" diff --git a/node_modules/npm/man/man5/install.5 b/node_modules/npm/man/man5/install.5 index 096c705..efbbdcc 100644 --- a/node_modules/npm/man/man5/install.5 +++ b/node_modules/npm/man/man5/install.5 @@ -1,4 +1,4 @@ -.TH "INSTALL" "5" "June 2023" "" "" +.TH "INSTALL" "5" "July 2023" "" "" .SH "NAME" \fBinstall\fR - Download and install node and npm .SS "Description" diff --git a/node_modules/npm/man/man5/npm-global.5 b/node_modules/npm/man/man5/npm-global.5 index c4ddf51..3661e0b 100644 --- a/node_modules/npm/man/man5/npm-global.5 +++ b/node_modules/npm/man/man5/npm-global.5 @@ -1,4 +1,4 @@ -.TH "FOLDERS" "5" "June 2023" "" "" +.TH "FOLDERS" "5" "July 2023" "" "" .SH "NAME" \fBfolders\fR - Folder Structures Used by npm .SS "Description" diff --git a/node_modules/npm/man/man5/npm-json.5 b/node_modules/npm/man/man5/npm-json.5 index 36e2271..f1e5784 100644 --- a/node_modules/npm/man/man5/npm-json.5 +++ b/node_modules/npm/man/man5/npm-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE.JSON" "5" "June 2023" "" "" +.TH "PACKAGE.JSON" "5" "July 2023" "" "" .SH "NAME" \fBpackage.json\fR - Specifics of npm's package.json handling .SS "Description" @@ -244,8 +244,6 @@ Some special files and directories are also included or excluded regardless of w .P You can also provide a \fB.npmignore\fR file in the root of your package or in subdirectories, which will keep files from being included. At the root of your package it will not override the "files" field, but in subdirectories it will. The \fB.npmignore\fR file works just like a \fB.gitignore\fR. If there is a \fB.gitignore\fR file, and \fB.npmignore\fR is missing, \fB.gitignore\fR's contents will be used instead. .P -Files included with the "package.json#files" field \fIcannot\fR be excluded through \fB.npmignore\fR or \fB.gitignore\fR. 
-.P Certain files are always included, regardless of settings: .RS 0 .IP \(bu 4 diff --git a/node_modules/npm/man/man5/npm-shrinkwrap-json.5 b/node_modules/npm/man/man5/npm-shrinkwrap-json.5 index 3ac345a..cf3e37e 100644 --- a/node_modules/npm/man/man5/npm-shrinkwrap-json.5 +++ b/node_modules/npm/man/man5/npm-shrinkwrap-json.5 @@ -1,4 +1,4 @@ -.TH "NPM-SHRINKWRAP.JSON" "5" "June 2023" "" "" +.TH "NPM-SHRINKWRAP.JSON" "5" "July 2023" "" "" .SH "NAME" \fBnpm-shrinkwrap.json\fR - A publishable lockfile .SS "Description" diff --git a/node_modules/npm/man/man5/npmrc.5 b/node_modules/npm/man/man5/npmrc.5 index 8aaab33..7b222d3 100644 --- a/node_modules/npm/man/man5/npmrc.5 +++ b/node_modules/npm/man/man5/npmrc.5 @@ -1,4 +1,4 @@ -.TH "NPMRC" "5" "June 2023" "" "" +.TH "NPMRC" "5" "July 2023" "" "" .SH "NAME" \fBnpmrc\fR - The npm config files .SS "Description" diff --git a/node_modules/npm/man/man5/package-json.5 b/node_modules/npm/man/man5/package-json.5 index 36e2271..f1e5784 100644 --- a/node_modules/npm/man/man5/package-json.5 +++ b/node_modules/npm/man/man5/package-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE.JSON" "5" "June 2023" "" "" +.TH "PACKAGE.JSON" "5" "July 2023" "" "" .SH "NAME" \fBpackage.json\fR - Specifics of npm's package.json handling .SS "Description" @@ -244,8 +244,6 @@ Some special files and directories are also included or excluded regardless of w .P You can also provide a \fB.npmignore\fR file in the root of your package or in subdirectories, which will keep files from being included. At the root of your package it will not override the "files" field, but in subdirectories it will. The \fB.npmignore\fR file works just like a \fB.gitignore\fR. If there is a \fB.gitignore\fR file, and \fB.npmignore\fR is missing, \fB.gitignore\fR's contents will be used instead. .P -Files included with the "package.json#files" field \fIcannot\fR be excluded through \fB.npmignore\fR or \fB.gitignore\fR. 
-.P Certain files are always included, regardless of settings: .RS 0 .IP \(bu 4 diff --git a/node_modules/npm/man/man5/package-lock-json.5 b/node_modules/npm/man/man5/package-lock-json.5 index ceac876..82435a4 100644 --- a/node_modules/npm/man/man5/package-lock-json.5 +++ b/node_modules/npm/man/man5/package-lock-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE-LOCK.JSON" "5" "June 2023" "" "" +.TH "PACKAGE-LOCK.JSON" "5" "July 2023" "" "" .SH "NAME" \fBpackage-lock.json\fR - A manifestation of the manifest .SS "Description" diff --git a/node_modules/npm/man/man7/config.7 b/node_modules/npm/man/man7/config.7 index cec41d8..3bcd607 100644 --- a/node_modules/npm/man/man7/config.7 +++ b/node_modules/npm/man/man7/config.7 @@ -1,4 +1,4 @@ -.TH "CONFIG" "7" "June 2023" "" "" +.TH "CONFIG" "7" "July 2023" "" "" .SH "NAME" \fBconfig\fR - More than you probably want to know about npm configuration .SS "Description" diff --git a/node_modules/npm/man/man7/dependency-selectors.7 b/node_modules/npm/man/man7/dependency-selectors.7 index 74767ec..8e557ef 100644 --- a/node_modules/npm/man/man7/dependency-selectors.7 +++ b/node_modules/npm/man/man7/dependency-selectors.7 @@ -1,4 +1,4 @@ -.TH "QUERYING" "7" "June 2023" "" "" +.TH "QUERYING" "7" "July 2023" "" "" .SH "NAME" \fBQuerying\fR - Dependency Selector Syntax & Querying .SS "Description" diff --git a/node_modules/npm/man/man7/developers.7 b/node_modules/npm/man/man7/developers.7 index 30c2c60..788ed4b 100644 --- a/node_modules/npm/man/man7/developers.7 +++ b/node_modules/npm/man/man7/developers.7 @@ -1,4 +1,4 @@ -.TH "DEVELOPERS" "7" "June 2023" "" "" +.TH "DEVELOPERS" "7" "July 2023" "" "" .SH "NAME" \fBdevelopers\fR - Developer Guide .SS "Description" diff --git a/node_modules/npm/man/man7/logging.7 b/node_modules/npm/man/man7/logging.7 index 62ae6b1..0c96f75 100644 --- a/node_modules/npm/man/man7/logging.7 +++ b/node_modules/npm/man/man7/logging.7 @@ -1,4 +1,4 @@ -.TH "LOGGING" "7" "June 2023" "" "" +.TH "LOGGING" "7" "July 2023" "" "" .SH "NAME" \fBLogging\fR - Why, What & How We Log .SS "Description" diff --git a/node_modules/npm/man/man7/orgs.7 b/node_modules/npm/man/man7/orgs.7 index 6ebfc55..2d0ec91 100644 --- a/node_modules/npm/man/man7/orgs.7 +++ b/node_modules/npm/man/man7/orgs.7 @@ -1,4 +1,4 @@ -.TH "ORGS" "7" "June 2023" "" "" +.TH "ORGS" "7" "July 2023" "" "" .SH "NAME" \fBorgs\fR - Working with Teams & Orgs .SS "Description" diff --git a/node_modules/npm/man/man7/package-spec.7 b/node_modules/npm/man/man7/package-spec.7 index 0e3ff55..2d02001 100644 --- a/node_modules/npm/man/man7/package-spec.7 +++ b/node_modules/npm/man/man7/package-spec.7 @@ -1,4 +1,4 @@ -.TH "PACKAGE-SPEC" "7" "June 2023" "" "" +.TH "PACKAGE-SPEC" "7" "July 2023" "" "" .SH "NAME" \fBpackage-spec\fR - Package name specifier .SS "Description" diff --git a/node_modules/npm/man/man7/registry.7 b/node_modules/npm/man/man7/registry.7 index db66480..9b68a2a 100644 --- a/node_modules/npm/man/man7/registry.7 +++ b/node_modules/npm/man/man7/registry.7 @@ -1,4 +1,4 @@ -.TH "REGISTRY" "7" "June 2023" "" "" +.TH "REGISTRY" "7" "July 2023" "" "" .SH "NAME" \fBregistry\fR - The JavaScript Package Registry .SS "Description" diff --git a/node_modules/npm/man/man7/removal.7 b/node_modules/npm/man/man7/removal.7 index dfd9542..1ae685b 100644 --- a/node_modules/npm/man/man7/removal.7 +++ b/node_modules/npm/man/man7/removal.7 @@ -1,4 +1,4 @@ -.TH "REMOVAL" "7" "June 2023" "" "" +.TH "REMOVAL" "7" "July 2023" "" "" .SH "NAME" \fBremoval\fR - Cleaning the Slate .SS "Synopsis" diff --git 
a/node_modules/npm/man/man7/scope.7 b/node_modules/npm/man/man7/scope.7 index ac9cd5f..04dc80f 100644 --- a/node_modules/npm/man/man7/scope.7 +++ b/node_modules/npm/man/man7/scope.7 @@ -1,4 +1,4 @@ -.TH "SCOPE" "7" "June 2023" "" "" +.TH "SCOPE" "7" "July 2023" "" "" .SH "NAME" \fBscope\fR - Scoped packages .SS "Description" diff --git a/node_modules/npm/man/man7/scripts.7 b/node_modules/npm/man/man7/scripts.7 index 72012d4..043b296 100644 --- a/node_modules/npm/man/man7/scripts.7 +++ b/node_modules/npm/man/man7/scripts.7 @@ -1,4 +1,4 @@ -.TH "SCRIPTS" "7" "June 2023" "" "" +.TH "SCRIPTS" "7" "July 2023" "" "" .SH "NAME" \fBscripts\fR - How npm handles the "scripts" field .SS "Description" diff --git a/node_modules/npm/man/man7/workspaces.7 b/node_modules/npm/man/man7/workspaces.7 index 68dca1c..8f11ac9 100644 --- a/node_modules/npm/man/man7/workspaces.7 +++ b/node_modules/npm/man/man7/workspaces.7 @@ -1,4 +1,4 @@ -.TH "WORKSPACES" "7" "June 2023" "" "" +.TH "WORKSPACES" "7" "July 2023" "" "" .SH "NAME" \fBworkspaces\fR - Working with workspaces .SS "Description" diff --git a/node_modules/npm/node_modules/@gar/promisify/LICENSE.md b/node_modules/npm/node_modules/@gar/promisify/LICENSE.md deleted file mode 100644 index 64f7732..0000000 --- a/node_modules/npm/node_modules/@gar/promisify/LICENSE.md +++ /dev/null @@ -1,10 +0,0 @@ -The MIT License (MIT) - -Copyright © 2020-2022 Michael Garvin - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
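The `@gar/promisify` module being dropped from npm's bundled dependencies here (its `index.js` is deleted just below) promisified an entire object at once via a Proxy: function-valued properties come back promise-returning, everything else passes through untouched. A usage sketch against Node's built-in `fs`, with the behavior read off the deleted source:

// Usage sketch for the removed @gar/promisify: the Proxy `get` trap
// promisifies any method on access, so a callback-style API can be
// consumed with async/await without wrapping each function by hand.
const promisifyAll = require('@gar/promisify')
const fs = require('fs')

async function main () {
  const pfs = promisifyAll(fs)
  const stat = await pfs.stat('.') // equivalent to util.promisify(fs.stat)('.')
  console.log(stat.isDirectory()) // non-function properties pass through as-is
}

main().catch(console.error)
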
- diff --git a/node_modules/npm/node_modules/@gar/promisify/index.js b/node_modules/npm/node_modules/@gar/promisify/index.js deleted file mode 100644 index d0be95f..0000000 --- a/node_modules/npm/node_modules/@gar/promisify/index.js +++ /dev/null @@ -1,36 +0,0 @@ -'use strict' - -const { promisify } = require('util') - -const handler = { - get: function (target, prop, receiver) { - if (typeof target[prop] !== 'function') { - return target[prop] - } - if (target[prop][promisify.custom]) { - return function () { - return Reflect.get(target, prop, receiver)[promisify.custom].apply(target, arguments) - } - } - return function () { - return new Promise((resolve, reject) => { - Reflect.get(target, prop, receiver).apply(target, [...arguments, function (err, result) { - if (err) { - return reject(err) - } - resolve(result) - }]) - }) - } - } -} - -module.exports = function (thingToPromisify) { - if (typeof thingToPromisify === 'function') { - return promisify(thingToPromisify) - } - if (typeof thingToPromisify === 'object') { - return new Proxy(thingToPromisify, handler) - } - throw new TypeError('Can only promisify functions or objects') -} diff --git a/node_modules/npm/node_modules/@gar/promisify/package.json b/node_modules/npm/node_modules/@gar/promisify/package.json deleted file mode 100644 index d0ce69b..0000000 --- a/node_modules/npm/node_modules/@gar/promisify/package.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "name": "@gar/promisify", - "version": "1.1.3", - "description": "Promisify an entire class or object", - "main": "index.js", - "repository": { - "type": "git", - "url": "https://github.com/wraithgar/gar-promisify.git" - }, - "scripts": { - "lint": "standard", - "lint:fix": "standard --fix", - "test": "lab -a @hapi/code -t 100", - "posttest": "npm run lint" - }, - "files": [ - "index.js" - ], - "keywords": [ - "promisify", - "all", - "class", - "object" - ], - "author": "Gar ", - "license": "MIT", - "devDependencies": { - "@hapi/code": "^8.0.1", - "@hapi/lab": "^24.1.0", - "standard": "^16.0.3" - } -} diff --git a/node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi/index.js b/node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi/index.js index ef3c095..ba19750 100644 --- a/node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi/index.js +++ b/node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi/index.js @@ -1,9 +1,14 @@ import ansiRegex from 'ansi-regex'; +const regex = ansiRegex(); + export default function stripAnsi(string) { if (typeof string !== 'string') { throw new TypeError(`Expected a \`string\`, got \`${typeof string}\``); } - return string.replace(ansiRegex(), ''); + // Even though the regex is global, we don't need to reset the `.lastIndex` + // because unlike `.exec()` and `.test()`, `.replace()` does it automatically + // and doing it manually has a performance penalty. 
+ return string.replace(regex, ''); } diff --git a/node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json b/node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json index 0de0586..e1f455c 100644 --- a/node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json +++ b/node_modules/npm/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json @@ -1,6 +1,6 @@ { "name": "strip-ansi", - "version": "7.0.1", + "version": "7.1.0", "description": "Strip ANSI escape codes from a string", "license": "MIT", "repository": "chalk/strip-ansi", diff --git a/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js b/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js index 0057d38..020038b 100644 --- a/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js +++ b/node_modules/npm/node_modules/@npmcli/arborist/lib/arborist/reify.js @@ -483,17 +483,29 @@ module.exports = cls => class Reifier extends cls { process.emit('time', 'reify:trashOmits') - const filter = node => - node.top.isProjectRoot && - ( - node.peer && this[_omitPeer] || - node.dev && this[_omitDev] || - node.optional && this[_omitOptional] || - node.devOptional && this[_omitOptional] && this[_omitDev] - ) - - for (const node of this.idealTree.inventory.filter(filter)) { - this[_addNodeToTrashList](node) + for (const node of this.idealTree.inventory.values()) { + const { top } = node + + // if the top is not the root or workspace then we do not want to omit it + if (!top.isProjectRoot && !top.isWorkspace) { + continue + } + + // if a diff filter has been created, then we do not omit the node if the + // top node is not in that set + if (this.diff?.filterSet?.size && !this.diff.filterSet.has(top)) { + continue + } + + // omit node if the dep type matches any omit flags that were set + if ( + node.peer && this[_omitPeer] || + node.dev && this[_omitDev] || + node.optional && this[_omitOptional] || + node.devOptional && this[_omitOptional] && this[_omitDev] + ) { + this[_addNodeToTrashList](node) + } } process.emit('timeEnd', 'reify:trashOmits') @@ -1411,8 +1423,7 @@ module.exports = cls => class Reifier extends cls { for (const tree of updatedTrees) { // refresh the edges so they have the correct specs tree.package = tree.package - const pkgJson = await PackageJson.load(tree.path) - .catch(() => new PackageJson(tree.path)) + const pkgJson = await PackageJson.load(tree.path, { create: true }) const { dependencies = {}, devDependencies = {}, diff --git a/node_modules/npm/node_modules/@npmcli/arborist/package.json b/node_modules/npm/node_modules/@npmcli/arborist/package.json index ec98a3c..712d01b 100644 --- a/node_modules/npm/node_modules/@npmcli/arborist/package.json +++ b/node_modules/npm/node_modules/@npmcli/arborist/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/arborist", - "version": "6.2.9", + "version": "6.3.0", "description": "Manage node_modules trees", "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", @@ -10,7 +10,7 @@ "@npmcli/metavuln-calculator": "^5.0.0", "@npmcli/name-from-folder": "^2.0.0", "@npmcli/node-gyp": "^3.0.0", - "@npmcli/package-json": "^3.0.0", + "@npmcli/package-json": "^4.0.0", "@npmcli/query": "^3.0.0", "@npmcli/run-script": "^6.0.0", "bin-links": "^4.0.1", @@ -74,18 +74,12 @@ "bin": { "arborist": "bin/index.js" }, - "//": "sk test-env locale to catch locale-specific sorting", "tap": { - "color": true, "after": "test/fixtures/cleanup.js", "test-env": [ - "NODE_OPTIONS=--no-warnings", 
"LC_ALL=sk" ], - "node-arg": [ - "--no-warnings", - "--no-deprecation" - ], + "color": 1, "timeout": "360", "nyc-arg": [ "--exclude", diff --git a/node_modules/npm/lib/utils/config/definition.js b/node_modules/npm/node_modules/@npmcli/config/lib/definitions/definition.js similarity index 97% rename from node_modules/npm/lib/utils/config/definition.js rename to node_modules/npm/node_modules/@npmcli/config/lib/definitions/definition.js index 54e522c..333a919 100644 --- a/node_modules/npm/lib/utils/config/definition.js +++ b/node_modules/npm/node_modules/@npmcli/config/lib/definitions/definition.js @@ -25,13 +25,11 @@ const allowed = [ ] const { - typeDefs: { - semver: { type: semver }, - Umask: { type: Umask }, - url: { type: url }, - path: { type: path }, - }, -} = require('@npmcli/config') + semver: { type: semver }, + Umask: { type: Umask }, + url: { type: url }, + path: { type: path }, +} = require('../type-defs.js') class Definition { constructor (key, def) { diff --git a/node_modules/npm/lib/utils/config/definitions.js b/node_modules/npm/node_modules/@npmcli/config/lib/definitions/definitions.js similarity index 99% rename from node_modules/npm/lib/utils/config/definitions.js rename to node_modules/npm/node_modules/@npmcli/config/lib/definitions/definitions.js index f86c3dd..fe5cafa 100644 --- a/node_modules/npm/lib/utils/config/definitions.js +++ b/node_modules/npm/node_modules/@npmcli/config/lib/definitions/definitions.js @@ -3,12 +3,12 @@ module.exports = definitions const Definition = require('./definition.js') -const { version: npmVersion } = require('../../../package.json') const ciInfo = require('ci-info') const querystring = require('querystring') -const { isWindows } = require('../is-windows.js') const { join } = require('path') +const isWindows = process.platform === 'win32' + // used by cafile flattening to flatOptions.ca const fs = require('fs') const maybeReadFile = file => { @@ -87,19 +87,15 @@ const cacheRoot = (isWindows && process.env.LOCALAPPDATA) || '~' const cacheExtra = isWindows ? 'npm-cache' : '.npm' const cache = `${cacheRoot}/${cacheExtra}` -const Config = require('@npmcli/config') - // TODO: refactor these type definitions so that they are less // weird to pull out of the config module. // TODO: use better type definition/validation API, nopt's is so weird. 
const { - typeDefs: { - semver: { type: semver }, - Umask: { type: Umask }, - url: { type: url }, - path: { type: path }, - }, -} = Config + semver: { type: semver }, + Umask: { type: Umask }, + url: { type: url }, + path: { type: path }, +} = require('../type-defs.js') const define = (key, def) => { /* istanbul ignore if - this should never happen, prevents mistakes below */ @@ -2233,7 +2229,7 @@ define('user-agent', { } flatOptions.userAgent = value.replace(/\{node-version\}/gi, process.version) - .replace(/\{npm-version\}/gi, npmVersion) + .replace(/\{npm-version\}/gi, obj['npm-version']) .replace(/\{platform\}/gi, process.platform) .replace(/\{arch\}/gi, process.arch) .replace(/\{workspaces\}/gi, inWorkspaces) diff --git a/node_modules/npm/lib/utils/config/index.js b/node_modules/npm/node_modules/@npmcli/config/lib/definitions/index.js similarity index 86% rename from node_modules/npm/lib/utils/config/index.js rename to node_modules/npm/node_modules/@npmcli/config/lib/definitions/index.js index d393aec..748f306 100644 --- a/node_modules/npm/lib/utils/config/index.js +++ b/node_modules/npm/node_modules/@npmcli/config/lib/definitions/index.js @@ -31,6 +31,16 @@ const flatten = (obj, flat = {}) => { return flat } +const definitionProps = Object.entries(definitions) + .reduce((acc, [key, { short = [], default: d }]) => { + // can be either an array or string + for (const s of [].concat(short)) { + acc.shorthands[s] = [`--${key}`] + } + acc.defaults[key] = d + return acc + }, { shorthands: {}, defaults: {} }) + // aliases where they get expanded into a completely different thing // these are NOT supported in the environment or npmrc files, only // expanded on the CLI. @@ -55,23 +65,11 @@ const shorthands = { readonly: ['--read-only'], reg: ['--registry'], iwr: ['--include-workspace-root'], - ...Object.entries(definitions).reduce((acc, [key, { short = [] }]) => { - // can be either an array or string - for (const s of [].concat(short)) { - acc[s] = [`--${key}`] - } - return acc - }, {}), + ...definitionProps.shorthands, } module.exports = { - get defaults () { - // NB: 'default' is a reserved word - return Object.entries(definitions).reduce((acc, [key, { default: d }]) => { - acc[key] = d - return acc - }, {}) - }, + defaults: definitionProps.defaults, definitions, flatten, shorthands, diff --git a/node_modules/npm/node_modules/@npmcli/config/lib/errors.js b/node_modules/npm/node_modules/@npmcli/config/lib/errors.js index fa3e207..6161509 100644 --- a/node_modules/npm/node_modules/@npmcli/config/lib/errors.js +++ b/node_modules/npm/node_modules/@npmcli/config/lib/errors.js @@ -4,6 +4,7 @@ class ErrInvalidAuth extends Error { constructor (problems) { let message = 'Invalid auth configuration found: ' message += problems.map((problem) => { + // istanbul ignore else if (problem.action === 'delete') { return `\`${problem.key}\` is not allowed in ${problem.where} config` } else if (problem.action === 'rename') { diff --git a/node_modules/npm/node_modules/@npmcli/config/lib/index.js b/node_modules/npm/node_modules/@npmcli/config/lib/index.js index 84a0098..0e19d32 100644 --- a/node_modules/npm/node_modules/@npmcli/config/lib/index.js +++ b/node_modules/npm/node_modules/@npmcli/config/lib/index.js @@ -305,10 +305,20 @@ class Config { this.loadGlobalPrefix() this.loadHome() - this.#loadObject({ + const defaultsObject = { ...this.defaults, prefix: this.globalPrefix, - }, 'default', 'default values') + } + + try { + defaultsObject['npm-version'] = require(join(this.npmPath, 'package.json')).version + 
} catch { + // in some weird state where the passed in npmPath does not have a package.json + // this will never happen in npm, but is guarded here in case this is consumed + // in other ways + tests + } + + this.#loadObject(defaultsObject, 'default', 'default values') const { data } = this.data.get('default') @@ -446,7 +456,7 @@ class Config { nopt.invalidHandler = (k, val, type) => this.invalidHandler(k, val, type, obj.source, where) - nopt.clean(obj.data, this.types, this.typeDefs) + nopt.clean(obj.data, this.types, typeDefs) nopt.invalidHandler = null return obj[_valid] diff --git a/node_modules/npm/node_modules/@npmcli/config/lib/umask.js b/node_modules/npm/node_modules/@npmcli/config/lib/umask.js index 195fad2..4d9ebbd 100644 --- a/node_modules/npm/node_modules/@npmcli/config/lib/umask.js +++ b/node_modules/npm/node_modules/@npmcli/config/lib/umask.js @@ -1,5 +1,9 @@ class Umask {} const parse = val => { + // this is run via nopt and parse field where everything is + // converted to a string first, ignoring coverage for now + // instead of figuring out what is happening under the hood in nopt + // istanbul ignore else if (typeof val === 'string') { if (/^0o?[0-7]+$/.test(val)) { return parseInt(val.replace(/^0o?/, ''), 8) @@ -8,15 +12,16 @@ const parse = val => { } else { throw new Error(`invalid umask value: ${val}`) } + } else { + if (typeof val !== 'number') { + throw new Error(`invalid umask value: ${val}`) + } + val = Math.floor(val) + if (val < 0 || val > 511) { + throw new Error(`invalid umask value: ${val}`) + } + return val } - if (typeof val !== 'number') { - throw new Error(`invalid umask value: ${val}`) - } - val = Math.floor(val) - if (val < 0 || val > 511) { - throw new Error(`invalid umask value: ${val}`) - } - return val } const validate = (data, k, val) => { diff --git a/node_modules/npm/node_modules/@npmcli/config/package.json b/node_modules/npm/node_modules/@npmcli/config/package.json index e0190fc..420981b 100644 --- a/node_modules/npm/node_modules/@npmcli/config/package.json +++ b/node_modules/npm/node_modules/@npmcli/config/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/config", - "version": "6.2.0", + "version": "6.2.1", "files": [ "bin/", "lib/" @@ -24,8 +24,6 @@ "template-oss-apply": "template-oss-apply --force" }, "tap": { - "check-coverage": true, - "coverage-map": "map.js", "nyc-arg": [ "--exclude", "tap-snapshots/**" @@ -33,11 +31,13 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", + "@npmcli/mock-globals": "^1.0.0", "@npmcli/template-oss": "4.14.1", "tap": "^16.3.4" }, "dependencies": { "@npmcli/map-workspaces": "^3.0.2", + "ci-info": "^3.8.0", "ini": "^4.1.0", "nopt": "^7.0.0", "proc-log": "^3.0.0", diff --git a/node_modules/npm/node_modules/@npmcli/move-file/LICENSE.md b/node_modules/npm/node_modules/@npmcli/move-file/LICENSE.md deleted file mode 100644 index 072bf20..0000000 --- a/node_modules/npm/node_modules/@npmcli/move-file/LICENSE.md +++ /dev/null @@ -1,22 +0,0 @@ -MIT License - -Copyright (c) Sindre Sorhus (https://sindresorhus.com) -Copyright (c) npm, Inc. 
- -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. diff --git a/node_modules/npm/node_modules/@npmcli/move-file/lib/index.js b/node_modules/npm/node_modules/@npmcli/move-file/lib/index.js deleted file mode 100644 index 5789bb1..0000000 --- a/node_modules/npm/node_modules/@npmcli/move-file/lib/index.js +++ /dev/null @@ -1,185 +0,0 @@ -const { dirname, join, resolve, relative, isAbsolute } = require('path') -const rimraf_ = require('rimraf') -const { promisify } = require('util') -const { - access: access_, - accessSync, - copyFile: copyFile_, - copyFileSync, - readdir: readdir_, - readdirSync, - rename: rename_, - renameSync, - stat: stat_, - statSync, - lstat: lstat_, - lstatSync, - symlink: symlink_, - symlinkSync, - readlink: readlink_, - readlinkSync, -} = require('fs') - -const access = promisify(access_) -const copyFile = promisify(copyFile_) -const readdir = promisify(readdir_) -const rename = promisify(rename_) -const stat = promisify(stat_) -const lstat = promisify(lstat_) -const symlink = promisify(symlink_) -const readlink = promisify(readlink_) -const rimraf = promisify(rimraf_) -const rimrafSync = rimraf_.sync - -const mkdirp = require('mkdirp') - -const pathExists = async path => { - try { - await access(path) - return true - } catch (er) { - return er.code !== 'ENOENT' - } -} - -const pathExistsSync = path => { - try { - accessSync(path) - return true - } catch (er) { - return er.code !== 'ENOENT' - } -} - -const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => { - if (!source || !destination) { - throw new TypeError('`source` and `destination` file required') - } - - options = { - overwrite: true, - ...options, - } - - if (!options.overwrite && await pathExists(destination)) { - throw new Error(`The destination file exists: ${destination}`) - } - - await mkdirp(dirname(destination)) - - try { - await rename(source, destination) - } catch (error) { - if (error.code === 'EXDEV' || error.code === 'EPERM') { - const sourceStat = await lstat(source) - if (sourceStat.isDirectory()) { - const files = await readdir(source) - await Promise.all(files.map((file) => - moveFile(join(source, file), join(destination, file), options, false, symlinks) - )) - } else if (sourceStat.isSymbolicLink()) { - symlinks.push({ source, destination }) - } else { - await copyFile(source, destination) - } - } else { - throw error - } - } - - if (root) { - await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => { - let target = await readlink(symSource) - // 
junction symlinks in windows will be absolute paths, so we need to - // make sure they point to the symlink destination - if (isAbsolute(target)) { - target = resolve(symDestination, relative(symSource, target)) - } - // try to determine what the actual file is so we can create the correct - // type of symlink in windows - let targetStat = 'file' - try { - targetStat = await stat(resolve(dirname(symSource), target)) - if (targetStat.isDirectory()) { - targetStat = 'junction' - } - } catch { - // targetStat remains 'file' - } - await symlink( - target, - symDestination, - targetStat - ) - })) - await rimraf(source) - } -} - -const moveFileSync = (source, destination, options = {}, root = true, symlinks = []) => { - if (!source || !destination) { - throw new TypeError('`source` and `destination` file required') - } - - options = { - overwrite: true, - ...options, - } - - if (!options.overwrite && pathExistsSync(destination)) { - throw new Error(`The destination file exists: ${destination}`) - } - - mkdirp.sync(dirname(destination)) - - try { - renameSync(source, destination) - } catch (error) { - if (error.code === 'EXDEV' || error.code === 'EPERM') { - const sourceStat = lstatSync(source) - if (sourceStat.isDirectory()) { - const files = readdirSync(source) - for (const file of files) { - moveFileSync(join(source, file), join(destination, file), options, false, symlinks) - } - } else if (sourceStat.isSymbolicLink()) { - symlinks.push({ source, destination }) - } else { - copyFileSync(source, destination) - } - } else { - throw error - } - } - - if (root) { - for (const { source: symSource, destination: symDestination } of symlinks) { - let target = readlinkSync(symSource) - // junction symlinks in windows will be absolute paths, so we need to - // make sure they point to the symlink destination - if (isAbsolute(target)) { - target = resolve(symDestination, relative(symSource, target)) - } - // try to determine what the actual file is so we can create the correct - // type of symlink in windows - let targetStat = 'file' - try { - targetStat = statSync(resolve(dirname(symSource), target)) - if (targetStat.isDirectory()) { - targetStat = 'junction' - } - } catch { - // targetStat remains 'file' - } - symlinkSync( - target, - symDestination, - targetStat - ) - } - rimrafSync(source) - } -} - -module.exports = moveFile -module.exports.sync = moveFileSync diff --git a/node_modules/npm/node_modules/@npmcli/move-file/package.json b/node_modules/npm/node_modules/@npmcli/move-file/package.json deleted file mode 100644 index 58793b9..0000000 --- a/node_modules/npm/node_modules/@npmcli/move-file/package.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "name": "@npmcli/move-file", - "version": "2.0.1", - "files": [ - "bin/", - "lib/" - ], - "main": "lib/index.js", - "description": "move a file (fork of move-file)", - "dependencies": { - "mkdirp": "^1.0.4", - "rimraf": "^3.0.2" - }, - "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.5.0", - "tap": "^16.0.1" - }, - "scripts": { - "test": "tap", - "snap": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/move-file.git" - }, - "tap": { - "check-coverage": true - }, - "license": "MIT", - "engines": { - 
"node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "author": "GitHub Inc.", - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.5.0" - } -} diff --git a/node_modules/npm/node_modules/@npmcli/package-json/lib/index.js b/node_modules/npm/node_modules/@npmcli/package-json/lib/index.js index 756837c..53558a3 100644 --- a/node_modules/npm/node_modules/@npmcli/package-json/lib/index.js +++ b/node_modules/npm/node_modules/@npmcli/package-json/lib/index.js @@ -34,7 +34,23 @@ class PackageJson { 'bin', ]) + // npm pkg fix + static fixSteps = Object.freeze([ + 'binRefs', + 'bundleDependencies', + 'bundleDependenciesFalse', + 'fixNameField', + 'fixVersionField', + 'fixRepositoryField', + 'fixBinField', + 'fixDependencies', + 'fixScriptsField', + 'devDependencies', + 'scriptpath', + ]) + static prepareSteps = Object.freeze([ + '_id', '_attributes', 'bundledDependencies', 'bundleDependencies', @@ -52,37 +68,67 @@ class PackageJson { 'binRefs', ]) - // default behavior, just loads and parses - static async load (path) { - return await new PackageJson(path).load() + // create a new empty package.json, so we can save at the given path even + // though we didn't start from a parsed file + static async create (path, opts = {}) { + const p = new PackageJson() + await p.create(path) + if (opts.data) { + return p.update(opts.data) + } + return p + } + + // Loads a package.json at given path and JSON parses + static async load (path, opts = {}) { + const p = new PackageJson() + // Avoid try/catch if we aren't going to create + if (!opts.create) { + return p.load(path) + } + + try { + return await p.load(path) + } catch (err) { + if (!err.message.startsWith('Could not read package.json')) { + throw err + } + return await p.create(path) + } + } + + // npm pkg fix + static async fix (path, opts) { + const p = new PackageJson() + await p.load(path, true) + return p.fix(opts) } // read-package-json compatible behavior static async prepare (path, opts) { - return await new PackageJson(path).prepare(opts) + const p = new PackageJson() + await p.load(path, true) + return p.prepare(opts) } // read-package-json-fast compatible behavior static async normalize (path, opts) { - return await new PackageJson(path).normalize(opts) + const p = new PackageJson() + await p.load(path) + return p.normalize(opts) } - #filename #path - #manifest = {} + #manifest #readFileContent = '' - #fromIndex = false + #canSave = true - constructor (path) { + // Load content from given path + async load (path, parseIndex) { this.#path = path - this.#filename = resolve(path, 'package.json') - } - - async load (parseIndex) { let parseErr try { - this.#readFileContent = - await readFile(this.#filename, 'utf8') + this.#readFileContent = await readFile(this.filename, 'utf8') } catch (err) { err.message = `Could not read package.json: ${err}` if (!parseIndex) { @@ -92,7 +138,7 @@ class PackageJson { } if (parseErr) { - const indexFile = resolve(this.#path, 'index.js') + const indexFile = resolve(this.path, 'index.js') let indexFileContent try { indexFileContent = await readFile(indexFile, 'utf8') @@ -100,16 +146,22 @@ class PackageJson { throw parseErr } try { - this.#manifest = fromComment(indexFileContent) + this.fromComment(indexFileContent) } catch (err) { throw parseErr } - this.#fromIndex = true + // This wasn't a package.json so prevent saving + this.#canSave = false return this } + return this.fromJSON(this.#readFileContent) + } + + // Load data from 
a JSON string/buffer + fromJSON (data) { try { - this.#manifest = parseJSON(this.#readFileContent) + this.#manifest = parseJSON(data) } catch (err) { err.message = `Invalid package.json: ${err}` throw err @@ -117,6 +169,27 @@ class PackageJson { return this } + // Load data from a comment + // /**package { "name": "foo", "version": "1.2.3", ... } **/ + fromComment (data) { + data = data.split(/^\/\*\*package(?:\s|$)/m) + + if (data.length < 2) { + throw new Error('File has no package in comments') + } + data = data[1] + data = data.split(/\*\*\/$/m) + + if (data.length < 2) { + throw new Error('File has no package in comments') + } + data = data[0] + data = data.replace(/^\s*\*/mg, '') + + this.#manifest = parseJSON(data) + return this + } + get content () { return this.#manifest } @@ -125,26 +198,33 @@ class PackageJson { return this.#path } + get filename () { + if (this.path) { + return resolve(this.path, 'package.json') + } + return undefined + } + + create (path) { + this.#path = path + this.#manifest = {} + return this + } + + // This should be the ONLY way to set content in the manifest update (content) { - // validates both current manifest and content param - const invalidContent = - typeof this.#manifest !== 'object' - || typeof content !== 'object' - if (invalidContent) { - throw Object.assign( - new Error(`Can't update invalid package.json data`), - { code: 'EPACKAGEJSONUPDATE' } - ) + if (!this.content) { + throw new Error('Can not update without content. Please `load` or `create`') } for (const step of knownSteps) { - this.#manifest = step({ content, originalContent: this.#manifest }) + this.#manifest = step({ content, originalContent: this.content }) } // unknown properties will just be overwitten for (const [key, value] of Object.entries(content)) { if (!knownKeys.has(key)) { - this.#manifest[key] = value + this.content[key] = value } } @@ -152,23 +232,23 @@ class PackageJson { } async save () { - if (this.#fromIndex) { + if (!this.#canSave) { throw new Error('No package.json to save to') } const { [Symbol.for('indent')]: indent, [Symbol.for('newline')]: newline, - } = this.#manifest + } = this.content const format = indent === undefined ? ' ' : indent const eol = newline === undefined ? '\n' : newline const fileContent = `${ - JSON.stringify(this.#manifest, null, format) + JSON.stringify(this.content, null, format) }\n` .replace(/\n/g, eol) if (fileContent.trim() !== this.#readFileContent.trim()) { - return await writeFile(this.#filename, fileContent) + return await writeFile(this.filename, fileContent) } } @@ -176,7 +256,6 @@ class PackageJson { if (!opts.steps) { opts.steps = this.constructor.normalizeSteps } - await this.load() await normalize(this, opts) return this } @@ -185,29 +264,16 @@ class PackageJson { if (!opts.steps) { opts.steps = this.constructor.prepareSteps } - await this.load(true) await normalize(this, opts) return this } -} - -// /**package { "name": "foo", "version": "1.2.3", ... 
} **/ -function fromComment (data) { - data = data.split(/^\/\*\*package(?:\s|$)/m) - if (data.length < 2) { - throw new Error('File has no package in comments') - } - data = data[1] - data = data.split(/\*\*\/$/m) - - if (data.length < 2) { - throw new Error('File has no package in comments') + async fix (opts = {}) { + // This one is not overridable + opts.steps = this.constructor.fixSteps + await normalize(this, opts) + return this } - data = data[0] - data = data.replace(/^\s*\*/mg, '') - - return parseJSON(data) } module.exports = PackageJson diff --git a/node_modules/npm/node_modules/@npmcli/package-json/lib/normalize.js b/node_modules/npm/node_modules/@npmcli/package-json/lib/normalize.js index 9594ef3..726b3f0 100644 --- a/node_modules/npm/node_modules/@npmcli/package-json/lib/normalize.js +++ b/node_modules/npm/node_modules/@npmcli/package-json/lib/normalize.js @@ -1,20 +1,40 @@ const fs = require('fs/promises') const { glob } = require('glob') const normalizePackageBin = require('npm-normalize-package-bin') -const normalizePackageData = require('normalize-package-data') +const legacyFixer = require('normalize-package-data/lib/fixer.js') +const legacyMakeWarning = require('normalize-package-data/lib/make_warning.js') const path = require('path') const log = require('proc-log') const git = require('@npmcli/git') -const normalize = async (pkg, { strict, steps, root }) => { +// We don't want the `changes` array in here by default because this is a hot +// path for parsing packuments during install. So the calling method passes it +// in if it wants to track changes. +const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) => { + if (!pkg.content) { + throw new Error('Can not normalize without content') + } const data = pkg.content const scripts = data.scripts || {} const pkgId = `${data.name ?? ''}@${data.version ?? 
''}` + legacyFixer.warn = function () { + changes?.push(legacyMakeWarning.apply(null, arguments)) + } + + // name and version are load bearing so we have to clean them up first + if (steps.includes('fixNameField') || steps.includes('normalizeData')) { + legacyFixer.fixNameField(data, { strict, allowLegacyCase }) + } + + if (steps.includes('fixVersionField') || steps.includes('normalizeData')) { + legacyFixer.fixVersionField(data, strict) + } // remove attributes that start with "_" if (steps.includes('_attributes')) { for (const key in data) { if (key.startsWith('_')) { + changes?.push(`"${key}" was removed`) delete pkg.content[key] } } @@ -23,6 +43,7 @@ const normalize = async (pkg, { strict, steps, root }) => { // build the "_id" attribute if (steps.includes('_id')) { if (data.name && data.version) { + changes?.push(`"_id" was set to ${pkgId}`) data._id = pkgId } } @@ -32,20 +53,25 @@ const normalize = async (pkg, { strict, steps, root }) => { if (data.bundleDependencies === undefined && data.bundledDependencies !== undefined) { data.bundleDependencies = data.bundledDependencies } + changes?.push(`Deleted incorrect "bundledDependencies"`) delete data.bundledDependencies } // expand "bundleDependencies: true or translate from object" if (steps.includes('bundleDependencies')) { const bd = data.bundleDependencies if (bd === false && !steps.includes('bundleDependenciesDeleteFalse')) { + changes?.push(`"bundleDependencies" was changed from "false" to "[]"`) data.bundleDependencies = [] } else if (bd === true) { + changes?.push(`"bundleDependencies" was auto-populated from "dependencies"`) data.bundleDependencies = Object.keys(data.dependencies || {}) } else if (bd && typeof bd === 'object') { if (!Array.isArray(bd)) { + changes?.push(`"bundleDependencies" was changed from an object to an array`) data.bundleDependencies = Object.keys(bd) } } else { + changes?.push(`"bundleDependencies" was removed`) delete data.bundleDependencies } } @@ -58,9 +84,11 @@ const normalize = async (pkg, { strict, steps, root }) => { if (data.dependencies && data.optionalDependencies && typeof data.optionalDependencies === 'object') { for (const name in data.optionalDependencies) { + changes?.push(`optionalDependencies entry "${name}" was removed`) delete data.dependencies[name] } if (!Object.keys(data.dependencies).length) { + changes?.push(`empty "optionalDependencies" was removed`) delete data.dependencies } } @@ -74,6 +102,8 @@ const normalize = async (pkg, { strict, steps, root }) => { scripts.install = 'node-gyp rebuild' data.scripts = scripts data.gypfile = true + changes?.push(`"scripts.install" was set to "node-gyp rebuild"`) + changes?.push(`"gypfile" was set to "true"`) } } } @@ -84,6 +114,7 @@ const normalize = async (pkg, { strict, steps, root }) => { await fs.access(path.join(pkg.path, 'server.js')) scripts.start = 'node server.js' data.scripts = scripts + changes?.push('"scripts.start" was set to "node server.js"') } catch { // do nothing } @@ -96,11 +127,14 @@ const normalize = async (pkg, { strict, steps, root }) => { for (const name in data.scripts) { if (typeof data.scripts[name] !== 'string') { delete data.scripts[name] + changes?.push(`invalid scripts entry "${name}" was removed`) } else if (steps.includes('scriptpath')) { data.scripts[name] = data.scripts[name].replace(spre, '') + changes?.push(`scripts entry "${name}" was fixed to remove node_modules/.bin reference`) } } } else { + changes?.push(`removed invalid "scripts"`) delete data.scripts } } @@ -108,6 +142,7 @@ const normalize = async 
(pkg, { strict, steps, root }) => { if (steps.includes('funding')) { if (data.funding && typeof data.funding === 'string') { data.funding = { url: data.funding } + changes?.push(`"funding" was changed to an object with a url attribute`) } } @@ -119,6 +154,7 @@ const normalize = async (pkg, { strict, steps, root }) => { .map(line => line.replace(/^\s*#.*$/, '').trim()) .filter(line => line) data.contributors = authors + changes.push('"contributors" was auto-populated with the contents of the "AUTHORS" file') } catch { // do nothing } @@ -145,6 +181,12 @@ const normalize = async (pkg, { strict, steps, root }) => { const readmeData = await fs.readFile(path.join(pkg.path, readmeFile), 'utf8') data.readme = readmeData data.readmeFilename = readmeFile + changes?.push(`"readme" was set to the contents of ${readmeFile}`) + changes?.push(`"readmeFilename" was set to ${readmeFile}`) + } + if (!data.readme) { + // this.warn('missingReadme') + data.readme = 'ERROR: No README data found!' } } @@ -270,9 +312,47 @@ const normalize = async (pkg, { strict, steps, root }) => { } } - // "normalizeData" from read-package-json + // "normalizeData" from "read-package-json", which was just a call through to + // "normalize-package-data". We only call the "fixer" functions because + // outside of that it was also clobbering _id (which we already conditionally + // do) and also adding the gypfile script (which we also already + // conditionally do) + + // Some steps are isolated so we can do a limited subset of these in `fix` + if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) { + legacyFixer.fixRepositoryField(data) + } + + if (steps.includes('fixBinField') || steps.includes('normalizeData')) { + legacyFixer.fixBinField(data) + } + + if (steps.includes('fixDependencies') || steps.includes('normalizeData')) { + legacyFixer.fixDependencies(data, strict) + } + + if (steps.includes('fixScriptsField') || steps.includes('normalizeData')) { + legacyFixer.fixScriptsField(data) + } + if (steps.includes('normalizeData')) { - normalizePackageData(data, strict) + const legacySteps = [ + 'fixDescriptionField', + 'fixModulesField', + 'fixFilesField', + 'fixManField', + 'fixBugsField', + 'fixKeywordsField', + 'fixBundleDependenciesField', + 'fixHomepageField', + 'fixReadmeField', + 'fixLicenseField', + 'fixPeople', + 'fixTypos', + ] + for (const legacyStep of legacySteps) { + legacyFixer[legacyStep](data) + } } // Warn if the bin references don't point to anything. This might be better diff --git a/node_modules/npm/node_modules/@npmcli/package-json/package.json b/node_modules/npm/node_modules/@npmcli/package-json/package.json index a4e2cba..4b9584d 100644 --- a/node_modules/npm/node_modules/@npmcli/package-json/package.json +++ b/node_modules/npm/node_modules/@npmcli/package-json/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/package-json", - "version": "3.1.1", + "version": "4.0.0", "description": "Programmatic API to update package.json", "main": "lib/index.js", "files": [ diff --git a/node_modules/npm/node_modules/@sigstore/tuf/LICENSE b/node_modules/npm/node_modules/@sigstore/tuf/LICENSE new file mode 100644 index 0000000..e9e7c16 --- /dev/null +++ b/node_modules/npm/node_modules/@sigstore/tuf/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2023 The Sigstore Authors + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/node_modules/npm/node_modules/sigstore/dist/util/appdata.js b/node_modules/npm/node_modules/@sigstore/tuf/dist/appdata.js similarity index 61% rename from node_modules/npm/node_modules/sigstore/dist/util/appdata.js rename to node_modules/npm/node_modules/@sigstore/tuf/dist/appdata.js index d0c7f6f..c9a8ee9 100644 --- a/node_modules/npm/node_modules/sigstore/dist/util/appdata.js +++ b/node_modules/npm/node_modules/@sigstore/tuf/dist/appdata.js @@ -4,19 +4,37 @@ var __importDefault = (this && this.__importDefault) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); exports.appDataPath = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ const os_1 = __importDefault(require("os")); const path_1 = __importDefault(require("path")); function appDataPath(name) { const homedir = os_1.default.homedir(); switch (process.platform) { + /* istanbul ignore next */ case 'darwin': { const appSupport = path_1.default.join(homedir, 'Library', 'Application Support'); return path_1.default.join(appSupport, name); } + /* istanbul ignore next */ case 'win32': { const localAppData = process.env.LOCALAPPDATA || path_1.default.join(homedir, 'AppData', 'Local'); return path_1.default.join(localAppData, name, 'Data'); } + /* istanbul ignore next */ default: { const localData = process.env.XDG_DATA_HOME || path_1.default.join(homedir, '.local', 'share'); return path_1.default.join(localData, name); diff --git a/node_modules/npm/node_modules/sigstore/dist/tuf/index.js b/node_modules/npm/node_modules/@sigstore/tuf/dist/client.js similarity index 63% rename from node_modules/npm/node_modules/sigstore/dist/tuf/index.js rename to node_modules/npm/node_modules/@sigstore/tuf/dist/client.js index 86a081d..08d6b61 100644 --- a/node_modules/npm/node_modules/sigstore/dist/tuf/index.js +++ b/node_modules/npm/node_modules/@sigstore/tuf/dist/client.js @@ -1,32 +1,9 @@ "use strict"; -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.TUFClient = exports.getTrustedRoot = void 0; +exports.TUFClient = void 0; /* Copyright 2023 The Sigstore Authors. @@ -45,27 +22,12 @@ limitations under the License. const fs_1 = __importDefault(require("fs")); const path_1 = __importDefault(require("path")); const tuf_js_1 = require("tuf-js"); -const sigstore = __importStar(require("../types/sigstore")); -const util_1 = require("../util"); const target_1 = require("./target"); -const TRUSTED_ROOT_TARGET = 'trusted_root.json'; -const DEFAULT_CACHE_DIR = util_1.appdata.appDataPath('sigstore-js'); -const DEFAULT_MIRROR_URL = 'https://tuf-repo-cdn.sigstore.dev'; -const DEFAULT_TUF_ROOT_PATH = '../../store/public-good-instance-root.json'; -async function getTrustedRoot(options = {}) { - const client = new TUFClient(options); - const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET); - return sigstore.TrustedRoot.fromJSON(JSON.parse(trustedRoot)); -} -exports.getTrustedRoot = getTrustedRoot; class TUFClient { constructor(options) { - const cachePath = options.cachePath || DEFAULT_CACHE_DIR; - const tufRootPath = options.rootPath || require.resolve(DEFAULT_TUF_ROOT_PATH); - const mirrorURL = options.mirrorURL || DEFAULT_MIRROR_URL; - initTufCache(cachePath, tufRootPath); - const remote = initRemoteConfig(cachePath, mirrorURL); - this.updater = initClient(cachePath, remote, options); + initTufCache(options.cachePath, options.rootPath); + const remote = initRemoteConfig(options.cachePath, options.mirrorURL); + this.updater = initClient(options.cachePath, remote, options); } async refresh() { return this.updater.refresh(); @@ -117,6 +79,7 @@ function initClient(cachePath, remote, options) { }; // tuf-js only supports a number for fetchRetries so we have to // convert the boolean and object options to a number. 
+ /* istanbul ignore if */ if (typeof options.retry !== 'undefined') { if (typeof options.retry === 'number') { config.fetchRetries = options.retry; diff --git a/node_modules/npm/node_modules/@sigstore/tuf/dist/error.js b/node_modules/npm/node_modules/@sigstore/tuf/dist/error.js new file mode 100644 index 0000000..e13971b --- /dev/null +++ b/node_modules/npm/node_modules/@sigstore/tuf/dist/error.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TUFError = void 0; +class TUFError extends Error { + constructor({ code, message, cause, }) { + super(message); + this.code = code; + this.cause = cause; + this.name = this.constructor.name; + } +} +exports.TUFError = TUFError; diff --git a/node_modules/npm/node_modules/@sigstore/tuf/dist/index.js b/node_modules/npm/node_modules/@sigstore/tuf/dist/index.js new file mode 100644 index 0000000..0d201c3 --- /dev/null +++ b/node_modules/npm/node_modules/@sigstore/tuf/dist/index.js @@ -0,0 +1,55 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TUFError = exports.initTUF = exports.getTrustedRoot = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +const protobuf_specs_1 = require("@sigstore/protobuf-specs"); +const appdata_1 = require("./appdata"); +const client_1 = require("./client"); +const DEFAULT_CACHE_DIR = 'sigstore-js'; +const DEFAULT_MIRROR_URL = 'https://tuf-repo-cdn.sigstore.dev'; +const DEFAULT_TUF_ROOT_PATH = '../store/public-good-instance-root.json'; +const DEFAULT_RETRY = { retries: 2 }; +const DEFAULT_TIMEOUT = 5000; +const TRUSTED_ROOT_TARGET = 'trusted_root.json'; +async function getTrustedRoot( +/* istanbul ignore next */ +options = {}) { + const client = createClient(options); + const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET); + return protobuf_specs_1.TrustedRoot.fromJSON(JSON.parse(trustedRoot)); +} +exports.getTrustedRoot = getTrustedRoot; +async function initTUF( +/* istanbul ignore next */ +options = {}) { + const client = createClient(options); + return client.refresh().then(() => client); +} +exports.initTUF = initTUF; +// Create a TUF client with default options +function createClient(options) { + /* istanbul ignore next */ + return new client_1.TUFClient({ + cachePath: options.cachePath || (0, appdata_1.appDataPath)(DEFAULT_CACHE_DIR), + rootPath: options.rootPath || require.resolve(DEFAULT_TUF_ROOT_PATH), + mirrorURL: options.mirrorURL || DEFAULT_MIRROR_URL, + retry: options.retry ?? DEFAULT_RETRY, + timeout: options.timeout ?? 
DEFAULT_TIMEOUT, + }); +} +var error_1 = require("./error"); +Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return error_1.TUFError; } }); diff --git a/node_modules/npm/node_modules/sigstore/dist/tuf/target.js b/node_modules/npm/node_modules/@sigstore/tuf/dist/target.js similarity index 92% rename from node_modules/npm/node_modules/sigstore/dist/tuf/target.js rename to node_modules/npm/node_modules/@sigstore/tuf/dist/target.js index d7df61e..29eaf99 100644 --- a/node_modules/npm/node_modules/sigstore/dist/tuf/target.js +++ b/node_modules/npm/node_modules/@sigstore/tuf/dist/target.js @@ -20,14 +20,14 @@ See the License for the specific language governing permissions and limitations under the License. */ const fs_1 = __importDefault(require("fs")); -const error_1 = require("../error"); +const error_1 = require("./error"); // Downloads and returns the specified target from the provided TUF Updater. async function readTarget(tuf, targetPath) { const path = await getTargetPath(tuf, targetPath); return new Promise((resolve, reject) => { fs_1.default.readFile(path, 'utf-8', (err, data) => { if (err) { - reject(new error_1.InternalError({ + reject(new error_1.TUFError({ code: 'TUF_READ_TARGET_ERROR', message: `error reading target ${path}`, cause: err, @@ -49,14 +49,14 @@ async function getTargetPath(tuf, target) { targetInfo = await tuf.getTargetInfo(target); } catch (err) { - throw new error_1.InternalError({ + throw new error_1.TUFError({ code: 'TUF_REFRESH_METADATA_ERROR', message: 'error refreshing TUF metadata', cause: err, }); } if (!targetInfo) { - throw new error_1.InternalError({ + throw new error_1.TUFError({ code: 'TUF_FIND_TARGET_ERROR', message: `target ${target} not found`, }); @@ -69,7 +69,7 @@ async function getTargetPath(tuf, target) { path = await tuf.downloadTarget(targetInfo); } catch (err) { - throw new error_1.InternalError({ + throw new error_1.TUFError({ code: 'TUF_DOWNLOAD_TARGET_ERROR', message: `error downloading target ${path}`, cause: err, diff --git a/node_modules/npm/node_modules/@sigstore/tuf/package.json b/node_modules/npm/node_modules/@sigstore/tuf/package.json new file mode 100644 index 0000000..286d481 --- /dev/null +++ b/node_modules/npm/node_modules/@sigstore/tuf/package.json @@ -0,0 +1,41 @@ +{ + "name": "@sigstore/tuf", + "version": "1.0.2", + "description": "Client for the Sigstore TUF repository", + "main": "dist/index.js", + "types": "dist/index.d.ts", + "scripts": { + "clean": "shx rm -rf dist *.tsbuildinfo", + "build": "tsc --build", + "test": "jest" + }, + "files": [ + "dist", + "store" + ], + "author": "bdehamer@github.com", + "license": "Apache-2.0", + "repository": { + "type": "git", + "url": "git+https://github.com/sigstore/sigstore-js.git" + }, + "bugs": { + "url": "https://github.com/sigstore/sigstore-js/issues" + }, + "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/tuf#readme", + "publishConfig": { + "provenance": true + }, + "devDependencies": { + "@sigstore/jest": "^0.0.0", + "@tufjs/repo-mock": "^1.1.0", + "@types/make-fetch-happen": "^10.0.0" + }, + "dependencies": { + "@sigstore/protobuf-specs": "^0.1.0", + "tuf-js": "^1.1.7" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } +} diff --git a/node_modules/npm/node_modules/sigstore/store/public-good-instance-root.json b/node_modules/npm/node_modules/@sigstore/tuf/store/public-good-instance-root.json similarity index 100% rename from node_modules/npm/node_modules/sigstore/store/public-good-instance-root.json rename 
to node_modules/npm/node_modules/@sigstore/tuf/store/public-good-instance-root.json diff --git a/node_modules/npm/node_modules/cacache/lib/util/glob.js b/node_modules/npm/node_modules/cacache/lib/util/glob.js index 3132a4d..8500c1c 100644 --- a/node_modules/npm/node_modules/cacache/lib/util/glob.js +++ b/node_modules/npm/node_modules/cacache/lib/util/glob.js @@ -1,6 +1,7 @@ 'use strict' const { glob } = require('glob') +const path = require('path') -const globify = (pattern) => pattern.split('//').join('/') +const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) module.exports = (path, options) => glob(globify(path), options) diff --git a/node_modules/npm/node_modules/cacache/package.json b/node_modules/npm/node_modules/cacache/package.json index db17e3a..a6f6f9b 100644 --- a/node_modules/npm/node_modules/cacache/package.json +++ b/node_modules/npm/node_modules/cacache/package.json @@ -1,6 +1,6 @@ { "name": "cacache", - "version": "17.1.2", + "version": "17.1.3", "cache-version": { "content": "2", "index": "5" diff --git a/node_modules/npm/node_modules/exponential-backoff/LICENSE b/node_modules/npm/node_modules/exponential-backoff/LICENSE new file mode 100644 index 0000000..7a4a3ea --- /dev/null +++ b/node_modules/npm/node_modules/exponential-backoff/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/node_modules/npm/node_modules/exponential-backoff/dist/backoff.js b/node_modules/npm/node_modules/exponential-backoff/dist/backoff.js new file mode 100644 index 0000000..a0aa0dc --- /dev/null +++ b/node_modules/npm/node_modules/exponential-backoff/dist/backoff.js @@ -0,0 +1,118 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +var options_1 = require("./options"); +var delay_factory_1 = require("./delay/delay.factory"); +function backOff(request, options) { + if (options === void 0) { options = {}; } + return __awaiter(this, void 0, void 0, function () { + var sanitizedOptions, backOff; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + sanitizedOptions = options_1.getSanitizedOptions(options); + backOff = new BackOff(request, sanitizedOptions); + return [4 /*yield*/, backOff.execute()]; + case 1: return [2 /*return*/, _a.sent()]; + } + }); + }); +} +exports.backOff = backOff; +var BackOff = /** @class */ (function () { + function BackOff(request, options) { + this.request = request; + this.options = options; + this.attemptNumber = 0; + } + BackOff.prototype.execute = function () { + return __awaiter(this, void 0, void 0, function () { + var e_1, shouldRetry; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + if (!!this.attemptLimitReached) return [3 /*break*/, 7]; + _a.label = 1; + case 1: + _a.trys.push([1, 4, , 6]); + return [4 /*yield*/, this.applyDelay()]; + case 2: + _a.sent(); + return [4 /*yield*/, this.request()]; + case 3: return [2 /*return*/, _a.sent()]; + case 4: + e_1 = _a.sent(); + this.attemptNumber++; + return [4 /*yield*/, this.options.retry(e_1, this.attemptNumber)]; + case 5: + shouldRetry = _a.sent(); + if (!shouldRetry || this.attemptLimitReached) { + throw e_1; + } + return [3 /*break*/, 6]; + case 6: return [3 /*break*/, 0]; + case 7: throw new Error("Something went wrong."); + } + }); + }); + }; + Object.defineProperty(BackOff.prototype, "attemptLimitReached", { + get: function () { + return this.attemptNumber >= this.options.numOfAttempts; + }, + enumerable: true, + configurable: true + }); + 
BackOff.prototype.applyDelay = function () { + return __awaiter(this, void 0, void 0, function () { + var delay; + return __generator(this, function (_a) { + switch (_a.label) { + case 0: + delay = delay_factory_1.DelayFactory(this.options, this.attemptNumber); + return [4 /*yield*/, delay.apply()]; + case 1: + _a.sent(); + return [2 /*return*/]; + } + }); + }); + }; + return BackOff; +}()); +//# sourceMappingURL=backoff.js.map \ No newline at end of file diff --git a/node_modules/npm/node_modules/exponential-backoff/dist/delay/always/always.delay.js b/node_modules/npm/node_modules/exponential-backoff/dist/delay/always/always.delay.js new file mode 100644 index 0000000..40e3407 --- /dev/null +++ b/node_modules/npm/node_modules/exponential-backoff/dist/delay/always/always.delay.js @@ -0,0 +1,25 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +var delay_base_1 = require("../delay.base"); +var AlwaysDelay = /** @class */ (function (_super) { + __extends(AlwaysDelay, _super); + function AlwaysDelay() { + return _super !== null && _super.apply(this, arguments) || this; + } + return AlwaysDelay; +}(delay_base_1.Delay)); +exports.AlwaysDelay = AlwaysDelay; +//# sourceMappingURL=always.delay.js.map \ No newline at end of file diff --git a/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.base.js b/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.base.js new file mode 100644 index 0000000..b146c2f --- /dev/null +++ b/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.base.js @@ -0,0 +1,45 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var jitter_factory_1 = require("../jitter/jitter.factory"); +var Delay = /** @class */ (function () { + function Delay(options) { + this.options = options; + this.attempt = 0; + } + Delay.prototype.apply = function () { + var _this = this; + return new Promise(function (resolve) { return setTimeout(resolve, _this.jitteredDelay); }); + }; + Delay.prototype.setAttemptNumber = function (attempt) { + this.attempt = attempt; + }; + Object.defineProperty(Delay.prototype, "jitteredDelay", { + get: function () { + var jitter = jitter_factory_1.JitterFactory(this.options); + return jitter(this.delay); + }, + enumerable: true, + configurable: true + }); + Object.defineProperty(Delay.prototype, "delay", { + get: function () { + var constant = this.options.startingDelay; + var base = this.options.timeMultiple; + var power = this.numOfDelayedAttempts; + var delay = constant * Math.pow(base, power); + return Math.min(delay, this.options.maxDelay); + }, + enumerable: true, + configurable: true + }); + Object.defineProperty(Delay.prototype, "numOfDelayedAttempts", { + get: function () { + return this.attempt; + }, + enumerable: true, + configurable: true + }); + return Delay; +}()); +exports.Delay = Delay; +//# sourceMappingURL=delay.base.js.map \ No newline at end of file diff --git 
a/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.factory.js b/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.factory.js new file mode 100644 index 0000000..33008db --- /dev/null +++ b/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.factory.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var skip_first_delay_1 = require("./skip-first/skip-first.delay"); +var always_delay_1 = require("./always/always.delay"); +function DelayFactory(options, attempt) { + var delay = initDelayClass(options); + delay.setAttemptNumber(attempt); + return delay; +} +exports.DelayFactory = DelayFactory; +function initDelayClass(options) { + if (!options.delayFirstAttempt) { + return new skip_first_delay_1.SkipFirstDelay(options); + } + return new always_delay_1.AlwaysDelay(options); +} +//# sourceMappingURL=delay.factory.js.map \ No newline at end of file diff --git a/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.interface.js b/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.interface.js new file mode 100644 index 0000000..6fe2a5a --- /dev/null +++ b/node_modules/npm/node_modules/exponential-backoff/dist/delay/delay.interface.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=delay.interface.js.map \ No newline at end of file diff --git a/node_modules/npm/node_modules/exponential-backoff/dist/delay/skip-first/skip-first.delay.js b/node_modules/npm/node_modules/exponential-backoff/dist/delay/skip-first/skip-first.delay.js new file mode 100644 index 0000000..73f8841 --- /dev/null +++ b/node_modules/npm/node_modules/exponential-backoff/dist/delay/skip-first/skip-first.delay.js @@ -0,0 +1,82 @@ +"use strict"; +var __extends = (this && this.__extends) || (function () { + var extendStatics = function (d, b) { + extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; + return extendStatics(d, b); + }; + return function (d, b) { + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; +})(); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __generator = (this && this.__generator) || function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +var delay_base_1 = require("../delay.base"); +var SkipFirstDelay = /** @class */ (function (_super) { + __extends(SkipFirstDelay, _super); + function SkipFirstDelay() { + return _super !== null && _super.apply(this, arguments) || this; + } + SkipFirstDelay.prototype.apply = function () { + return __awaiter(this, void 0, void 0, function () { + return __generator(this, function (_a) { + return [2 /*return*/, this.isFirstAttempt ? 
true : _super.prototype.apply.call(this)]; + }); + }); + }; + Object.defineProperty(SkipFirstDelay.prototype, "isFirstAttempt", { + get: function () { + return this.attempt === 0; + }, + enumerable: true, + configurable: true + }); + Object.defineProperty(SkipFirstDelay.prototype, "numOfDelayedAttempts", { + get: function () { + return this.attempt - 1; + }, + enumerable: true, + configurable: true + }); + return SkipFirstDelay; +}(delay_base_1.Delay)); +exports.SkipFirstDelay = SkipFirstDelay; +//# sourceMappingURL=skip-first.delay.js.map \ No newline at end of file diff --git a/node_modules/npm/node_modules/exponential-backoff/dist/jitter/full/full.jitter.js b/node_modules/npm/node_modules/exponential-backoff/dist/jitter/full/full.jitter.js new file mode 100644 index 0000000..16cee36 --- /dev/null +++ b/node_modules/npm/node_modules/exponential-backoff/dist/jitter/full/full.jitter.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +function fullJitter(delay) { + var jitteredDelay = Math.random() * delay; + return Math.round(jitteredDelay); +} +exports.fullJitter = fullJitter; +//# sourceMappingURL=full.jitter.js.map \ No newline at end of file diff --git a/node_modules/npm/node_modules/exponential-backoff/dist/jitter/jitter.factory.js b/node_modules/npm/node_modules/exponential-backoff/dist/jitter/jitter.factory.js new file mode 100644 index 0000000..8aafe45 --- /dev/null +++ b/node_modules/npm/node_modules/exponential-backoff/dist/jitter/jitter.factory.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var full_jitter_1 = require("./full/full.jitter"); +var no_jitter_1 = require("./no/no.jitter"); +function JitterFactory(options) { + switch (options.jitter) { + case "full": + return full_jitter_1.fullJitter; + case "none": + default: + return no_jitter_1.noJitter; + } +} +exports.JitterFactory = JitterFactory; +//# sourceMappingURL=jitter.factory.js.map \ No newline at end of file diff --git a/node_modules/npm/node_modules/exponential-backoff/dist/jitter/no/no.jitter.js b/node_modules/npm/node_modules/exponential-backoff/dist/jitter/no/no.jitter.js new file mode 100644 index 0000000..15a40bb --- /dev/null +++ b/node_modules/npm/node_modules/exponential-backoff/dist/jitter/no/no.jitter.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +function noJitter(delay) { + return delay; +} +exports.noJitter = noJitter; +//# sourceMappingURL=no.jitter.js.map \ No newline at end of file diff --git a/node_modules/npm/node_modules/exponential-backoff/dist/options.js b/node_modules/npm/node_modules/exponential-backoff/dist/options.js new file mode 100644 index 0000000..1d2ca17 --- /dev/null +++ b/node_modules/npm/node_modules/exponential-backoff/dist/options.js @@ -0,0 +1,31 @@ +"use strict"; +var __assign = (this && this.__assign) || function () { + __assign = Object.assign || function(t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) + t[p] = s[p]; + } + return t; + }; + return __assign.apply(this, arguments); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +var defaultOptions = { + delayFirstAttempt: false, + jitter: "none", + maxDelay: Infinity, + numOfAttempts: 10, + retry: function () { return true; }, + startingDelay: 100, + timeMultiple: 2 +}; +function getSanitizedOptions(options) { + var sanitized = __assign(__assign({}, 
defaultOptions), options); + if (sanitized.numOfAttempts < 1) { + sanitized.numOfAttempts = 1; + } + return sanitized; +} +exports.getSanitizedOptions = getSanitizedOptions; +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/node_modules/npm/node_modules/exponential-backoff/package.json b/node_modules/npm/node_modules/exponential-backoff/package.json new file mode 100644 index 0000000..23232a0 --- /dev/null +++ b/node_modules/npm/node_modules/exponential-backoff/package.json @@ -0,0 +1,61 @@ +{ + "name": "exponential-backoff", + "version": "3.1.1", + "description": "A utility that allows retrying a function with an exponential delay between attempts.", + "files": [ + "dist/" + ], + "main": "dist/backoff.js", + "types": "dist/backoff.d.ts", + "scripts": { + "build": "tsc", + "test": "jest", + "test:watch": "jest --watch" + }, + "husky": { + "hooks": { + "pre-commit": "lint-staged" + } + }, + "lint-staged": { + "*.{ts,json,md}": [ + "prettier --write", + "git add" + ] + }, + "jest": { + "transform": { + "^.+\\.ts$": "ts-jest" + }, + "testRegex": "\\.spec\\.ts$", + "moduleFileExtensions": [ + "ts", + "js" + ] + }, + "repository": { + "type": "git", + "url": "git+https://github.com/coveo/exponential-backoff.git" + }, + "keywords": [ + "exponential", + "backoff", + "retry" + ], + "author": "Sami Sayegh", + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/coveo/exponential-backoff/issues" + }, + "homepage": "https://github.com/coveo/exponential-backoff#readme", + "devDependencies": { + "@types/jest": "^24.0.18", + "@types/node": "^10.14.21", + "husky": "^3.0.9", + "jest": "^24.9.0", + "lint-staged": "^9.4.2", + "prettier": "^1.18.2", + "ts-jest": "^24.1.0", + "typescript": "^3.6.4" + } +} diff --git a/node_modules/npm/node_modules/glob/dist/cjs/package.json b/node_modules/npm/node_modules/glob/dist/cjs/package.json index 8762de6..44b67c3 100644 --- a/node_modules/npm/node_modules/glob/dist/cjs/package.json +++ b/node_modules/npm/node_modules/glob/dist/cjs/package.json @@ -1,99 +1,4 @@ { - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", - "name": "glob", - "description": "the most correct and second fastest glob implementation in JavaScript", - "version": "10.2.4", - "bin": "./dist/cjs/src/bin.js", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-glob.git" - }, - "main": "./dist/cjs/src/index.js", - "module": "./dist/mjs/index.js", - "types": "./dist/mjs/index.d.ts", - "exports": { - ".": { - "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.js" - }, - "require": { - "types": "./dist/cjs/src/index.d.ts", - "default": "./dist/cjs/src/index.js" - } - } - }, - "files": [ - "dist" - ], - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "preprepare": "rm -rf dist", - "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", - "postprepare": "bash fixup.sh", - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "test": "c8 tap", - "snap": "c8 tap", - "format": "prettier --write . 
--loglevel warn", - "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", - "prepublish": "npm run benchclean", - "profclean": "rm -f v8.log profile.txt", - "test-regen": "npm run profclean && TEST_REGEN=1 node --no-warnings --loader ts-node/esm test/00-setup.ts", - "prebench": "npm run prepare", - "bench": "bash benchmark.sh", - "preprof": "npm run prepare", - "prof": "bash prof.sh", - "benchclean": "node benchclean.js" - }, - "prettier": { - "semi": false, - "printWidth": 75, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - }, - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^2.0.3", - "minimatch": "^9.0.0", - "minipass": "^5.0.0 || ^6.0.0", - "path-scurry": "^1.7.0" - }, - "devDependencies": { - "@types/node": "^18.11.18", - "@types/tap": "^15.0.7", - "c8": "^7.12.0", - "memfs": "^3.4.13", - "mkdirp": "^2.1.4", - "prettier": "^2.8.3", - "rimraf": "^4.1.3", - "tap": "^16.3.4", - "ts-node": "^10.9.1", - "typedoc": "^0.23.24", - "typescript": "^4.9.4" - }, - "tap": { - "before": "test/00-setup.ts", - "coverage": false, - "node-arg": [ - "--no-warnings", - "--loader", - "ts-node/esm" - ], - "ts": false - }, - "license": "ISC", - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - } + "version": "10.2.7", + "type": "commonjs" } diff --git a/node_modules/npm/node_modules/glob/dist/mjs/package.json b/node_modules/npm/node_modules/glob/dist/mjs/package.json index e066bfa..ac4c42f 100644 --- a/node_modules/npm/node_modules/glob/dist/mjs/package.json +++ b/node_modules/npm/node_modules/glob/dist/mjs/package.json @@ -1,4 +1,4 @@ { - "version": "10.2.3", + "version": "10.2.7", "type": "module" } diff --git a/node_modules/npm/node_modules/glob/package.json b/node_modules/npm/node_modules/glob/package.json index e11e8e3..ba9732c 100644 --- a/node_modules/npm/node_modules/glob/package.json +++ b/node_modules/npm/node_modules/glob/package.json @@ -1,8 +1,8 @@ { - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "author": "Isaac Z. Schlueter (https://blog.izs.me/)", "name": "glob", "description": "the most correct and second fastest glob implementation in JavaScript", - "version": "10.2.4", + "version": "10.2.7", "bin": "./dist/cjs/src/bin.js", "repository": { "type": "git", @@ -31,8 +31,7 @@ "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", "preprepare": "rm -rf dist", - "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", - "postprepare": "bash fixup.sh", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash fixup.sh", "pretest": "npm run prepare", "presnap": "npm run prepare", "test": "c8 tap", @@ -62,12 +61,12 @@ "dependencies": { "foreground-child": "^3.1.0", "jackspeak": "^2.0.3", - "minimatch": "^9.0.0", - "minipass": "^5.0.0 || ^6.0.0", + "minimatch": "^9.0.1", + "minipass": "^5.0.0 || ^6.0.2", "path-scurry": "^1.7.0" }, "devDependencies": { - "@types/node": "^18.11.18", + "@types/node": "^20.2.1", "@types/tap": "^15.0.7", "c8": "^7.12.0", "memfs": "^3.4.13", diff --git a/node_modules/npm/node_modules/infer-owner/LICENSE b/node_modules/npm/node_modules/infer-owner/LICENSE deleted file mode 100644 index 20a4762..0000000 --- a/node_modules/npm/node_modules/infer-owner/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) npm, Inc. 
and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm/node_modules/infer-owner/index.js b/node_modules/npm/node_modules/infer-owner/index.js deleted file mode 100644 index a7bddcb..0000000 --- a/node_modules/npm/node_modules/infer-owner/index.js +++ /dev/null @@ -1,71 +0,0 @@ -const cache = new Map() -const fs = require('fs') -const { dirname, resolve } = require('path') - - -const lstat = path => new Promise((res, rej) => - fs.lstat(path, (er, st) => er ? rej(er) : res(st))) - -const inferOwner = path => { - path = resolve(path) - if (cache.has(path)) - return Promise.resolve(cache.get(path)) - - const statThen = st => { - const { uid, gid } = st - cache.set(path, { uid, gid }) - return { uid, gid } - } - const parent = dirname(path) - const parentTrap = parent === path ? null : er => { - return inferOwner(parent).then((owner) => { - cache.set(path, owner) - return owner - }) - } - return lstat(path).then(statThen, parentTrap) -} - -const inferOwnerSync = path => { - path = resolve(path) - if (cache.has(path)) - return cache.get(path) - - const parent = dirname(path) - - // avoid obscuring call site by re-throwing - // "catch" the error by returning from a finally, - // only if we're not at the root, and the parent call works. - let threw = true - try { - const st = fs.lstatSync(path) - threw = false - const { uid, gid } = st - cache.set(path, { uid, gid }) - return { uid, gid } - } finally { - if (threw && parent !== path) { - const owner = inferOwnerSync(parent) - cache.set(path, owner) - return owner // eslint-disable-line no-unsafe-finally - } - } -} - -const inflight = new Map() -module.exports = path => { - path = resolve(path) - if (inflight.has(path)) - return Promise.resolve(inflight.get(path)) - const p = inferOwner(path).then(owner => { - inflight.delete(path) - return owner - }) - inflight.set(path, p) - return p -} -module.exports.sync = inferOwnerSync -module.exports.clearCache = () => { - cache.clear() - inflight.clear() -} diff --git a/node_modules/npm/node_modules/infer-owner/package.json b/node_modules/npm/node_modules/infer-owner/package.json deleted file mode 100644 index c4b2b6e..0000000 --- a/node_modules/npm/node_modules/infer-owner/package.json +++ /dev/null @@ -1,26 +0,0 @@ -{ - "name": "infer-owner", - "version": "1.0.4", - "description": "Infer the owner of a path based on the owner of its nearest existing parent", - "author": "Isaac Z. 
Schlueter (https://izs.me)", - "license": "ISC", - "scripts": { - "test": "tap -J test/*.js --100", - "snap": "TAP_SNAPSHOT=1 tap -J test/*.js --100", - "preversion": "npm test", - "postversion": "npm publish", - "postpublish": "git push origin --follow-tags" - }, - "devDependencies": { - "mutate-fs": "^2.1.1", - "tap": "^12.4.2" - }, - "main": "index.js", - "repository": "https://github.com/npm/infer-owner", - "publishConfig": { - "access": "public" - }, - "files": [ - "index.js" - ] -} diff --git a/node_modules/npm/node_modules/ini/lib/ini.js b/node_modules/npm/node_modules/ini/lib/ini.js index 763c829..724d69d 100644 --- a/node_modules/npm/node_modules/ini/lib/ini.js +++ b/node_modules/npm/node_modules/ini/lib/ini.js @@ -8,8 +8,9 @@ const encode = (obj, opt = {}) => { opt.newline = opt.newline === true opt.sort = opt.sort === true opt.whitespace = opt.whitespace === true || opt.align === true + // The `typeof` check is required because accessing the `process` directly fails on browsers. /* istanbul ignore next */ - opt.platform = opt.platform || process?.platform + opt.platform = opt.platform || (typeof process !== 'undefined' && process.platform) opt.bracketedArray = opt.bracketedArray !== false /* istanbul ignore next */ @@ -172,8 +173,8 @@ const decode = (str, opt = {}) => { const remove = [] for (const k of Object.keys(out)) { if (!hasOwnProperty.call(out, k) || - typeof out[k] !== 'object' || - Array.isArray(out[k])) { + typeof out[k] !== 'object' || + Array.isArray(out[k])) { continue } diff --git a/node_modules/npm/node_modules/ini/package.json b/node_modules/npm/node_modules/ini/package.json index 5dd968e..c1a50e9 100644 --- a/node_modules/npm/node_modules/ini/package.json +++ b/node_modules/npm/node_modules/ini/package.json @@ -2,7 +2,7 @@ "author": "GitHub Inc.", "name": "ini", "description": "An ini encoder/decoder for node", - "version": "4.1.0", + "version": "4.1.1", "repository": { "type": "git", "url": "https://github.com/npm/ini.git" @@ -20,7 +20,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.13.0", + "@npmcli/template-oss": "4.15.1", "tap": "^16.0.1" }, "license": "ISC", @@ -33,7 +33,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.13.0", + "version": "4.15.1", "publish": "true" }, "tap": { diff --git a/node_modules/npm/node_modules/is-core-module/core.json b/node_modules/npm/node_modules/is-core-module/core.json index 9a51663..af29f0b 100644 --- a/node_modules/npm/node_modules/is-core-module/core.json +++ b/node_modules/npm/node_modules/is-core-module/core.json @@ -114,7 +114,7 @@ "node:string_decoder": [">= 14.18 && < 15", ">= 16"], "sys": [">= 0.4 && < 0.7", ">= 0.8"], "node:sys": [">= 14.18 && < 15", ">= 16"], - "test/reporters": [">= 19.9", ">= 20"], + "test/reporters": ">= 19.9 && < 20.2", "node:test/reporters": [">= 19.9", ">= 20"], "node:test": [">= 16.17 && < 17", ">= 18"], "timers": true, diff --git a/node_modules/npm/node_modules/is-core-module/package.json b/node_modules/npm/node_modules/is-core-module/package.json index 715299b..62bb065 100644 --- a/node_modules/npm/node_modules/is-core-module/package.json +++ b/node_modules/npm/node_modules/is-core-module/package.json @@ -1,6 +1,6 @@ { "name": "is-core-module", - "version": "2.12.0", + "version": "2.12.1", "description": "Is this specifier a node.js core module?", "main": "index.js", "sideEffects": false, diff --git a/node_modules/npm/node_modules/jackspeak/package.json b/node_modules/npm/node_modules/jackspeak/package.json index 8d85e7f..afaa43e 100644 --- a/node_modules/npm/node_modules/jackspeak/package.json +++ b/node_modules/npm/node_modules/jackspeak/package.json @@ -1,6 +1,6 @@ { "name": "jackspeak", - "version": "2.2.0", + "version": "2.2.1", "description": "A very strict and proper argument parser.", "main": "./dist/cjs/index.js", "module": "./dist/mjs/index.js", diff --git a/node_modules/npm/node_modules/libnpmdiff/package.json b/node_modules/npm/node_modules/libnpmdiff/package.json index 9e2e96b..2ef51fb 100644 --- a/node_modules/npm/node_modules/libnpmdiff/package.json +++ b/node_modules/npm/node_modules/libnpmdiff/package.json @@ -1,6 +1,6 @@ { "name": "libnpmdiff", - "version": "5.0.17", + "version": "5.0.19", "description": "The registry diff", "repository": { "type": "git", @@ -46,7 +46,7 @@ "tap": "^16.3.4" }, "dependencies": { - "@npmcli/arborist": "^6.2.9", + "@npmcli/arborist": "^6.3.0", "@npmcli/disparity-colors": "^3.0.0", "@npmcli/installed-package-contents": "^2.0.2", "binary-extensions": "^2.2.0", diff --git a/node_modules/npm/node_modules/libnpmexec/package.json b/node_modules/npm/node_modules/libnpmexec/package.json index 1fa85ff..290d895 100644 --- a/node_modules/npm/node_modules/libnpmexec/package.json +++ b/node_modules/npm/node_modules/libnpmexec/package.json @@ -1,6 +1,6 @@ { "name": "libnpmexec", - "version": "6.0.0", + "version": "6.0.2", "files": [ "bin/", "lib/" @@ -56,11 +56,10 @@ "chalk": "^5.2.0", "just-extend": "^6.2.0", "just-safe-set": "^4.2.1", - "minify-registry-metadata": "^3.0.0", "tap": "^16.3.4" }, "dependencies": { - "@npmcli/arborist": "^6.2.9", + "@npmcli/arborist": "^6.3.0", "@npmcli/run-script": "^6.0.0", "ci-info": "^3.7.1", "npm-package-arg": "^10.1.0", diff --git a/node_modules/npm/node_modules/libnpmfund/package.json b/node_modules/npm/node_modules/libnpmfund/package.json index 88cff7a..8e0b6d0 100644 --- a/node_modules/npm/node_modules/libnpmfund/package.json +++ b/node_modules/npm/node_modules/libnpmfund/package.json @@ -1,6 +1,6 @@ { "name": "libnpmfund", - "version": "4.0.17", + "version": "4.0.19", "main": "lib/index.js", "files": [ "bin/", @@ -45,7 +45,7 @@ "tap": "^16.3.4" }, "dependencies": { - "@npmcli/arborist": "^6.2.9" + "@npmcli/arborist": 
"^6.3.0" }, "engines": { "node": "^14.17.0 || ^16.13.0 || >=18.0.0" diff --git a/node_modules/npm/node_modules/libnpmpack/package.json b/node_modules/npm/node_modules/libnpmpack/package.json index 8589f5d..12cf7aa 100644 --- a/node_modules/npm/node_modules/libnpmpack/package.json +++ b/node_modules/npm/node_modules/libnpmpack/package.json @@ -1,6 +1,6 @@ { "name": "libnpmpack", - "version": "5.0.17", + "version": "5.0.19", "description": "Programmatic API for the bits behind npm pack", "author": "GitHub Inc.", "main": "lib/index.js", @@ -36,7 +36,7 @@ "bugs": "https://github.com/npm/libnpmpack/issues", "homepage": "https://npmjs.com/package/libnpmpack", "dependencies": { - "@npmcli/arborist": "^6.2.9", + "@npmcli/arborist": "^6.3.0", "@npmcli/run-script": "^6.0.0", "npm-package-arg": "^10.1.0", "pacote": "^15.0.8" diff --git a/node_modules/npm/node_modules/libnpmpublish/lib/provenance.js b/node_modules/npm/node_modules/libnpmpublish/lib/provenance.js index ebe4a24..398db1b 100644 --- a/node_modules/npm/node_modules/libnpmpublish/lib/provenance.js +++ b/node_modules/npm/node_modules/libnpmpublish/lib/provenance.js @@ -1,69 +1,196 @@ const { sigstore } = require('sigstore') const { readFile } = require('fs/promises') +const ci = require('ci-info') +const { env } = process const INTOTO_PAYLOAD_TYPE = 'application/vnd.in-toto+json' -const INTOTO_STATEMENT_TYPE = 'https://in-toto.io/Statement/v0.1' -const SLSA_PREDICATE_TYPE = 'https://slsa.dev/provenance/v0.2' +const INTOTO_STATEMENT_V01_TYPE = 'https://in-toto.io/Statement/v0.1' +const INTOTO_STATEMENT_V1_TYPE = 'https://in-toto.io/Statement/v1' +const SLSA_PREDICATE_V02_TYPE = 'https://slsa.dev/provenance/v0.2' +const SLSA_PREDICATE_V1_TYPE = 'https://slsa.dev/provenance/v1' -const BUILDER_ID = 'https://github.com/actions/runner' -const BUILD_TYPE_PREFIX = 'https://github.com/npm/cli/gha' -const BUILD_TYPE_VERSION = 'v2' +const GITHUB_BUILDER_ID_PREFIX = 'https://github.com/actions/runner' +const GITHUB_BUILD_TYPE = 'https://slsa-framework.github.io/github-actions-buildtypes/workflow/v1' + +const GITLAB_BUILD_TYPE_PREFIX = 'https://github.com/npm/cli/gitlab' +const GITLAB_BUILD_TYPE_VERSION = 'v0alpha1' const generateProvenance = async (subject, opts) => { - const { env } = process - /* istanbul ignore next - not covering missing env var case */ - const [workflowPath] = (env.GITHUB_WORKFLOW_REF || '') - .replace(env.GITHUB_REPOSITORY + '/', '') - .split('@') - const payload = { - _type: INTOTO_STATEMENT_TYPE, - subject, - predicateType: SLSA_PREDICATE_TYPE, - predicate: { - buildType: `${BUILD_TYPE_PREFIX}/${BUILD_TYPE_VERSION}`, - builder: { id: BUILDER_ID }, - invocation: { - configSource: { - uri: `git+${env.GITHUB_SERVER_URL}/${env.GITHUB_REPOSITORY}@${env.GITHUB_REF}`, - digest: { - sha1: env.GITHUB_SHA, + let payload + if (ci.GITHUB_ACTIONS) { + /* istanbul ignore next - not covering missing env var case */ + const [workflowPath, workflowRef] = (env.GITHUB_WORKFLOW_REF || '') + .replace(env.GITHUB_REPOSITORY + '/', '') + .split('@') + payload = { + _type: INTOTO_STATEMENT_V1_TYPE, + subject, + predicateType: SLSA_PREDICATE_V1_TYPE, + predicate: { + buildDefinition: { + buildType: GITHUB_BUILD_TYPE, + externalParameters: { + workflow: { + ref: workflowRef, + repository: `${env.GITHUB_SERVER_URL}/${env.GITHUB_REPOSITORY}`, + path: workflowPath, + }, + }, + internalParameters: { + github: { + event_name: env.GITHUB_EVENT_NAME, + repository_id: env.GITHUB_REPOSITORY_ID, + repository_owner_id: env.GITHUB_REPOSITORY_OWNER_ID, + }, }, - 
entryPoint: workflowPath, + resolvedDependencies: [ + { + uri: `git+${env.GITHUB_SERVER_URL}/${env.GITHUB_REPOSITORY}@${env.GITHUB_REF}`, + digest: { + gitCommit: env.GITHUB_SHA, + }, + }, + ], }, - parameters: {}, - environment: { - GITHUB_EVENT_NAME: env.GITHUB_EVENT_NAME, - GITHUB_REF: env.GITHUB_REF, - GITHUB_REPOSITORY: env.GITHUB_REPOSITORY, - GITHUB_REPOSITORY_ID: env.GITHUB_REPOSITORY_ID, - GITHUB_REPOSITORY_OWNER_ID: env.GITHUB_REPOSITORY_OWNER_ID, - GITHUB_RUN_ATTEMPT: env.GITHUB_RUN_ATTEMPT, - GITHUB_RUN_ID: env.GITHUB_RUN_ID, - GITHUB_SHA: env.GITHUB_SHA, - GITHUB_WORKFLOW_REF: env.GITHUB_WORKFLOW_REF, - GITHUB_WORKFLOW_SHA: env.GITHUB_WORKFLOW_SHA, + runDetails: { + builder: { id: `${GITHUB_BUILDER_ID_PREFIX}/${env.RUNNER_ENVIRONMENT}` }, + metadata: { + /* eslint-disable-next-line max-len */ + invocationId: `${env.GITHUB_SERVER_URL}/${env.GITHUB_REPOSITORY}/actions/runs/${env.GITHUB_RUN_ID}/attempts/${env.GITHUB_RUN_ATTEMPT}`, + }, }, }, - metadata: { - buildInvocationId: `${env.GITHUB_RUN_ID}-${env.GITHUB_RUN_ATTEMPT}`, - completeness: { - parameters: false, - environment: false, - materials: false, + } + } + if (ci.GITLAB) { + payload = { + _type: INTOTO_STATEMENT_V01_TYPE, + subject, + predicateType: SLSA_PREDICATE_V02_TYPE, + predicate: { + buildType: `${GITLAB_BUILD_TYPE_PREFIX}/${GITLAB_BUILD_TYPE_VERSION}`, + builder: { id: `${env.CI_PROJECT_URL}/-/runners/${env.CI_RUNNER_ID}` }, + invocation: { + configSource: { + uri: `git+${env.CI_PROJECT_URL}`, + digest: { + sha1: env.CI_COMMIT_SHA, + }, + entryPoint: env.CI_JOB_NAME, + }, + parameters: { + CI: env.CI, + CI_API_GRAPHQL_URL: env.CI_API_GRAPHQL_URL, + CI_API_V4_URL: env.CI_API_V4_URL, + CI_BUILD_BEFORE_SHA: env.CI_BUILD_BEFORE_SHA, + CI_BUILD_ID: env.CI_BUILD_ID, + CI_BUILD_NAME: env.CI_BUILD_NAME, + CI_BUILD_REF: env.CI_BUILD_REF, + CI_BUILD_REF_NAME: env.CI_BUILD_REF_NAME, + CI_BUILD_REF_SLUG: env.CI_BUILD_REF_SLUG, + CI_BUILD_STAGE: env.CI_BUILD_STAGE, + CI_COMMIT_BEFORE_SHA: env.CI_COMMIT_BEFORE_SHA, + CI_COMMIT_BRANCH: env.CI_COMMIT_BRANCH, + CI_COMMIT_REF_NAME: env.CI_COMMIT_REF_NAME, + CI_COMMIT_REF_PROTECTED: env.CI_COMMIT_REF_PROTECTED, + CI_COMMIT_REF_SLUG: env.CI_COMMIT_REF_SLUG, + CI_COMMIT_SHA: env.CI_COMMIT_SHA, + CI_COMMIT_SHORT_SHA: env.CI_COMMIT_SHORT_SHA, + CI_COMMIT_TIMESTAMP: env.CI_COMMIT_TIMESTAMP, + CI_COMMIT_TITLE: env.CI_COMMIT_TITLE, + CI_CONFIG_PATH: env.CI_CONFIG_PATH, + CI_DEFAULT_BRANCH: env.CI_DEFAULT_BRANCH, + CI_DEPENDENCY_PROXY_DIRECT_GROUP_IMAGE_PREFIX: + env.CI_DEPENDENCY_PROXY_DIRECT_GROUP_IMAGE_PREFIX, + CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX: env.CI_DEPENDENCY_PROXY_GROUP_IMAGE_PREFIX, + CI_DEPENDENCY_PROXY_SERVER: env.CI_DEPENDENCY_PROXY_SERVER, + CI_DEPENDENCY_PROXY_USER: env.CI_DEPENDENCY_PROXY_USER, + CI_JOB_ID: env.CI_JOB_ID, + CI_JOB_NAME: env.CI_JOB_NAME, + CI_JOB_NAME_SLUG: env.CI_JOB_NAME_SLUG, + CI_JOB_STAGE: env.CI_JOB_STAGE, + CI_JOB_STARTED_AT: env.CI_JOB_STARTED_AT, + CI_JOB_URL: env.CI_JOB_URL, + CI_NODE_TOTAL: env.CI_NODE_TOTAL, + CI_PAGES_DOMAIN: env.CI_PAGES_DOMAIN, + CI_PAGES_URL: env.CI_PAGES_URL, + CI_PIPELINE_CREATED_AT: env.CI_PIPELINE_CREATED_AT, + CI_PIPELINE_ID: env.CI_PIPELINE_ID, + CI_PIPELINE_IID: env.CI_PIPELINE_IID, + CI_PIPELINE_SOURCE: env.CI_PIPELINE_SOURCE, + CI_PIPELINE_URL: env.CI_PIPELINE_URL, + CI_PROJECT_CLASSIFICATION_LABEL: env.CI_PROJECT_CLASSIFICATION_LABEL, + CI_PROJECT_DESCRIPTION: env.CI_PROJECT_DESCRIPTION, + CI_PROJECT_ID: env.CI_PROJECT_ID, + CI_PROJECT_NAME: env.CI_PROJECT_NAME, + CI_PROJECT_NAMESPACE: env.CI_PROJECT_NAMESPACE, 
+ CI_PROJECT_NAMESPACE_ID: env.CI_PROJECT_NAMESPACE_ID, + CI_PROJECT_PATH: env.CI_PROJECT_PATH, + CI_PROJECT_PATH_SLUG: env.CI_PROJECT_PATH_SLUG, + CI_PROJECT_REPOSITORY_LANGUAGES: env.CI_PROJECT_REPOSITORY_LANGUAGES, + CI_PROJECT_ROOT_NAMESPACE: env.CI_PROJECT_ROOT_NAMESPACE, + CI_PROJECT_TITLE: env.CI_PROJECT_TITLE, + CI_PROJECT_URL: env.CI_PROJECT_URL, + CI_PROJECT_VISIBILITY: env.CI_PROJECT_VISIBILITY, + CI_REGISTRY: env.CI_REGISTRY, + CI_REGISTRY_IMAGE: env.CI_REGISTRY_IMAGE, + CI_REGISTRY_USER: env.CI_REGISTRY_USER, + CI_RUNNER_DESCRIPTION: env.CI_RUNNER_DESCRIPTION, + CI_RUNNER_ID: env.CI_RUNNER_ID, + CI_RUNNER_TAGS: env.CI_RUNNER_TAGS, + CI_SERVER_HOST: env.CI_SERVER_HOST, + CI_SERVER_NAME: env.CI_SERVER_NAME, + CI_SERVER_PORT: env.CI_SERVER_PORT, + CI_SERVER_PROTOCOL: env.CI_SERVER_PROTOCOL, + CI_SERVER_REVISION: env.CI_SERVER_REVISION, + CI_SERVER_SHELL_SSH_HOST: env.CI_SERVER_SHELL_SSH_HOST, + CI_SERVER_SHELL_SSH_PORT: env.CI_SERVER_SHELL_SSH_PORT, + CI_SERVER_URL: env.CI_SERVER_URL, + CI_SERVER_VERSION: env.CI_SERVER_VERSION, + CI_SERVER_VERSION_MAJOR: env.CI_SERVER_VERSION_MAJOR, + CI_SERVER_VERSION_MINOR: env.CI_SERVER_VERSION_MINOR, + CI_SERVER_VERSION_PATCH: env.CI_SERVER_VERSION_PATCH, + CI_TEMPLATE_REGISTRY_HOST: env.CI_TEMPLATE_REGISTRY_HOST, + GITLAB_CI: env.GITLAB_CI, + GITLAB_FEATURES: env.GITLAB_FEATURES, + GITLAB_USER_ID: env.GITLAB_USER_ID, + GITLAB_USER_LOGIN: env.GITLAB_USER_LOGIN, + RUNNER_GENERATE_ARTIFACTS_METADATA: env.RUNNER_GENERATE_ARTIFACTS_METADATA, + }, + environment: { + name: env.CI_RUNNER_DESCRIPTION, + architecture: env.CI_RUNNER_EXECUTABLE_ARCH, + server: env.CI_SERVER_URL, + project: env.CI_PROJECT_PATH, + job: { + id: env.CI_JOB_ID, + }, + pipeline: { + id: env.CI_PIPELINE_ID, + ref: env.CI_CONFIG_PATH, + }, + }, }, - reproducible: false, - }, - materials: [ - { - uri: `git+${env.GITHUB_SERVER_URL}/${env.GITHUB_REPOSITORY}@${env.GITHUB_REF}`, - digest: { - sha1: env.GITHUB_SHA, + metadata: { + buildInvocationId: `${env.CI_JOB_URL}`, + completeness: { + parameters: true, + environment: true, + materials: false, }, + reproducible: false, }, - ], - }, + materials: [ + { + uri: `git+${env.CI_PROJECT_URL}`, + digest: { + sha1: env.CI_COMMIT_SHA, + }, + }, + ], + }, + } } - return sigstore.attest(Buffer.from(JSON.stringify(payload)), INTOTO_PAYLOAD_TYPE, opts) } diff --git a/node_modules/npm/node_modules/libnpmpublish/lib/publish.js b/node_modules/npm/node_modules/libnpmpublish/lib/publish.js index 3749c3c..554eb9b 100644 --- a/node_modules/npm/node_modules/libnpmpublish/lib/publish.js +++ b/node_modules/npm/node_modules/libnpmpublish/lib/publish.js @@ -166,7 +166,7 @@ const buildMetadata = async (registry, manifest, tarballData, spec, opts) => { provenanceBundle = await generateProvenance([subject], opts) /* eslint-disable-next-line max-len */ - log.notice('publish', 'Signed provenance statement with source and build information from GitHub Actions') + log.notice('publish', `Signed provenance statement with source and build information from ${ciInfo.name}`) const tlogEntry = provenanceBundle?.verificationMaterial?.tlogEntries[0] /* istanbul ignore else */ @@ -242,19 +242,27 @@ const patchMetadata = (current, newData) => { // Check that all the prereqs are met for provenance generation const ensureProvenanceGeneration = async (registry, spec, opts) => { - // Ensure that we're running in GHA, currently the only supported build environment - if (ciInfo.name !== 'GitHub Actions') { - throw Object.assign( - new Error('Automatic provenance generation not 
supported outside of GitHub Actions'), - { code: 'EUSAGE' } - ) - } - - // Ensure that the GHA OIDC token is available - if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL) { + if (ciInfo.GITHUB_ACTIONS) { + // Ensure that the GHA OIDC token is available + if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL) { + throw Object.assign( + /* eslint-disable-next-line max-len */ + new Error('Provenance generation in GitHub Actions requires "write" access to the "id-token" permission'), + { code: 'EUSAGE' } + ) + } + } else if (ciInfo.GITLAB) { + // Ensure that the Sigstore OIDC token is available + if (!process.env.SIGSTORE_ID_TOKEN) { + throw Object.assign( + /* eslint-disable-next-line max-len */ + new Error('Provenance generation in GitLab CI requires "SIGSTORE_ID_TOKEN" with "sigstore" audience to be present in "id_tokens". For more info see:\nhttps://docs.gitlab.com/ee/ci/secrets/id_token_authentication.html'), + { code: 'EUSAGE' } + ) + } + } else { throw Object.assign( - /* eslint-disable-next-line max-len */ - new Error('Provenance generation in GitHub Actions requires "write" access to the "id-token" permission'), + new Error('Automatic provenance generation not supported for provider: ' + ciInfo.name), { code: 'EUSAGE' } ) } @@ -264,7 +272,7 @@ const ensureProvenanceGeneration = async (registry, spec, opts) => { // the package is always private and require `--access public` to publish // with provenance. let visibility = { public: false } - if (true && opts.access !== 'public') { + if (opts.access !== 'public') { try { const res = await npmFetch .json(`${registry}/-/package/${spec.escapedName}/visibility`, opts) diff --git a/node_modules/npm/node_modules/libnpmpublish/package.json b/node_modules/npm/node_modules/libnpmpublish/package.json index 0e86861..6ea6a71 100644 --- a/node_modules/npm/node_modules/libnpmpublish/package.json +++ b/node_modules/npm/node_modules/libnpmpublish/package.json @@ -1,6 +1,6 @@ { "name": "libnpmpublish", - "version": "7.3.0", + "version": "7.5.0", "description": "Programmatic API for the bits behind npm publish and unpublish", "author": "GitHub Inc.", "main": "lib/index.js", diff --git a/node_modules/npm/node_modules/minimatch/dist/cjs/index.js b/node_modules/npm/node_modules/minimatch/dist/cjs/index.js index 3cbc67f..d70e681 100644 --- a/node_modules/npm/node_modules/minimatch/dist/cjs/index.js +++ b/node_modules/npm/node_modules/minimatch/dist/cjs/index.js @@ -608,39 +608,35 @@ class Minimatch { // the parts match. matchOne(file, pattern, partial = false) { const options = this.options; - // a UNC pattern like //?/c:/* can match a path like c:/x - // and vice versa + // UNC paths like //?/X:/... can match X:/... and vice versa + // Drive letters in absolute drive or unc paths are always compared + // case-insensitively. if (this.isWindows) { - const fileUNC = file[0] === '' && + const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]); + const fileUNC = !fileDrive && + file[0] === '' && file[1] === '' && file[2] === '?' && - typeof file[3] === 'string' && /^[a-z]:$/i.test(file[3]); - const patternUNC = pattern[0] === '' && + const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]); + const patternUNC = !patternDrive && + pattern[0] === '' && pattern[1] === '' && pattern[2] === '?' && typeof pattern[3] === 'string' && /^[a-z]:$/i.test(pattern[3]); - if (fileUNC && patternUNC) { - const fd = file[3]; - const pd = pattern[3]; + const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined; + const pdi = patternUNC ? 
3 : patternDrive ? 0 : undefined; + if (typeof fdi === 'number' && typeof pdi === 'number') { + const [fd, pd] = [file[fdi], pattern[pdi]]; if (fd.toLowerCase() === pd.toLowerCase()) { - file[3] = pd; - } - } - else if (patternUNC && typeof file[0] === 'string') { - const pd = pattern[3]; - const fd = file[0]; - if (pd.toLowerCase() === fd.toLowerCase()) { - pattern[3] = fd; - pattern = pattern.slice(3); - } - } - else if (fileUNC && typeof pattern[0] === 'string') { - const fd = file[3]; - if (fd.toLowerCase() === pattern[0].toLowerCase()) { - pattern[0] = fd; - file = file.slice(3); + pattern[pdi] = fd; + if (pdi > fdi) { + pattern = pattern.slice(pdi); + } + else if (fdi > pdi) { + file = file.slice(fdi); + } } } } diff --git a/node_modules/npm/node_modules/minimatch/dist/mjs/index.js b/node_modules/npm/node_modules/minimatch/dist/mjs/index.js index 0d5e956..831b6a6 100644 --- a/node_modules/npm/node_modules/minimatch/dist/mjs/index.js +++ b/node_modules/npm/node_modules/minimatch/dist/mjs/index.js @@ -596,39 +596,35 @@ export class Minimatch { // the parts match. matchOne(file, pattern, partial = false) { const options = this.options; - // a UNC pattern like //?/c:/* can match a path like c:/x - // and vice versa + // UNC paths like //?/X:/... can match X:/... and vice versa + // Drive letters in absolute drive or unc paths are always compared + // case-insensitively. if (this.isWindows) { - const fileUNC = file[0] === '' && + const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]); + const fileUNC = !fileDrive && + file[0] === '' && file[1] === '' && file[2] === '?' && - typeof file[3] === 'string' && /^[a-z]:$/i.test(file[3]); - const patternUNC = pattern[0] === '' && + const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]); + const patternUNC = !patternDrive && + pattern[0] === '' && pattern[1] === '' && pattern[2] === '?' && typeof pattern[3] === 'string' && /^[a-z]:$/i.test(pattern[3]); - if (fileUNC && patternUNC) { - const fd = file[3]; - const pd = pattern[3]; + const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined; + const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined; + if (typeof fdi === 'number' && typeof pdi === 'number') { + const [fd, pd] = [file[fdi], pattern[pdi]]; if (fd.toLowerCase() === pd.toLowerCase()) { - file[3] = pd; - } - } - else if (patternUNC && typeof file[0] === 'string') { - const pd = pattern[3]; - const fd = file[0]; - if (pd.toLowerCase() === fd.toLowerCase()) { - pattern[3] = fd; - pattern = pattern.slice(3); - } - } - else if (fileUNC && typeof pattern[0] === 'string') { - const fd = file[3]; - if (fd.toLowerCase() === pattern[0].toLowerCase()) { - pattern[0] = fd; - file = file.slice(3); + pattern[pdi] = fd; + if (pdi > fdi) { + pattern = pattern.slice(pdi); + } + else if (fdi > pdi) { + file = file.slice(fdi); + } } } } diff --git a/node_modules/npm/node_modules/minimatch/package.json b/node_modules/npm/node_modules/minimatch/package.json index 06d796a..d5ee74e 100644 --- a/node_modules/npm/node_modules/minimatch/package.json +++ b/node_modules/npm/node_modules/minimatch/package.json @@ -2,7 +2,7 @@ "author": "Isaac Z. 
Schlueter (http://blog.izs.me)", "name": "minimatch", "description": "a glob matcher in javascript", - "version": "9.0.0", + "version": "9.0.1", "repository": { "type": "git", "url": "git://github.com/isaacs/minimatch.git" diff --git a/node_modules/npm/node_modules/node-gyp/.github/workflows/tests.yml b/node_modules/npm/node_modules/node-gyp/.github/workflows/tests.yml index 8f34d4e..517b2d9 100644 --- a/node_modules/npm/node_modules/node-gyp/.github/workflows/tests.yml +++ b/node_modules/npm/node_modules/node-gyp/.github/workflows/tests.yml @@ -8,13 +8,19 @@ on: pull_request: branches: [ main ] jobs: + Lint_Python: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - run: pip install --user ruff + - run: ruff --format=github --select="E,F,PLC,PLE,UP,W,YTT" --ignore="PLC1901,S101,UP031" --target-version=py37 . Tests: strategy: fail-fast: false max-parallel: 15 matrix: - node: [14.x, 16.x, 18.x] - python: ["3.7", "3.9", "3.11"] + node: [16.x, 18.x, 20.x] + python: ["3.8", "3.11"] os: [macos-latest, ubuntu-latest, windows-latest] runs-on: ${{ matrix.os }} steps: @@ -33,15 +39,12 @@ jobs: - name: Install Dependencies run: | npm install --no-progress - pip install flake8 pytest + pip install pytest - name: Set Windows environment if: startsWith(matrix.os, 'windows') run: | echo 'GYP_MSVS_VERSION=2015' >> $Env:GITHUB_ENV echo 'GYP_MSVS_OVERRIDE_PATH=C:\\Dummy' >> $Env:GITHUB_ENV - - name: Lint Python - if: startsWith(matrix.os, 'ubuntu') - run: flake8 . --ignore=E203,W503 --max-complexity=101 --max-line-length=88 --show-source --statistics - name: Run Python tests run: python -m pytest # - name: Run doctests with pytest diff --git a/node_modules/npm/node_modules/node-gyp/CHANGELOG.md b/node_modules/npm/node_modules/node-gyp/CHANGELOG.md index 4131521..9fb5f11 100644 --- a/node_modules/npm/node_modules/node-gyp/CHANGELOG.md +++ b/node_modules/npm/node_modules/node-gyp/CHANGELOG.md @@ -1,5 +1,35 @@ # Changelog +## [9.4.0](https://www.github.com/nodejs/node-gyp/compare/v9.3.1...v9.4.0) (2023-06-12) + + +### Features + +* add support for native windows arm64 build tools ([bb76021](https://www.github.com/nodejs/node-gyp/commit/bb76021d35964d2bb125bc6214286f35ae4e6cad)) +* Upgrade Python linting from flake8 to ruff ([#2815](https://www.github.com/nodejs/node-gyp/issues/2815)) ([fc0ddc6](https://www.github.com/nodejs/node-gyp/commit/fc0ddc6523c62b10e5ca1257500b3ceac01450a7)) + + +### Bug Fixes + +* extract tarball to temp directory on Windows ([#2846](https://www.github.com/nodejs/node-gyp/issues/2846)) ([aaa117c](https://www.github.com/nodejs/node-gyp/commit/aaa117c514430aa2c1e568b95df1b6ed1c1fd3b6)) +* log statement is for devDir not nodedir ([#2840](https://www.github.com/nodejs/node-gyp/issues/2840)) ([55048f8](https://www.github.com/nodejs/node-gyp/commit/55048f8be5707c295fb0876306aded75638a8b63)) + + +### Miscellaneous + +* get update-gyp.py to work with Python >= v3.5 ([#2826](https://www.github.com/nodejs/node-gyp/issues/2826)) ([337e8e6](https://www.github.com/nodejs/node-gyp/commit/337e8e68209bd2481cbb11dacce61234dc5c9419)) + + +### Doc + +* docs/README.md add advise about deprecated node-sass ([#2828](https://www.github.com/nodejs/node-gyp/issues/2828)) ([6f3c2d3](https://www.github.com/nodejs/node-gyp/commit/6f3c2d3c6c0de0dbf8c7245f34c2e0b3eea53812)) +* Update README.md ([#2822](https://www.github.com/nodejs/node-gyp/issues/2822)) ([c7927e2](https://www.github.com/nodejs/node-gyp/commit/c7927e228dfde059c93e08c26b54dd8026144583)) + + +### Tests + +* remove deprecated Node.js 
and Python ([#2868](https://www.github.com/nodejs/node-gyp/issues/2868)) ([a0b3d1c](https://www.github.com/nodejs/node-gyp/commit/a0b3d1c3afed71a74501476fcbc6ee3fface4d13)) + ### [9.3.1](https://www.github.com/nodejs/node-gyp/compare/v9.3.0...v9.3.1) (2022-12-16) diff --git a/node_modules/npm/node_modules/node-gyp/README.md b/node_modules/npm/node_modules/node-gyp/README.md index 7636ad5..99494a3 100644 --- a/node_modules/npm/node_modules/node-gyp/README.md +++ b/node_modules/npm/node_modules/node-gyp/README.md @@ -53,11 +53,12 @@ Install tools and configuration manually: * Install Visual C++ Build Environment: [Visual Studio Build Tools](https://visualstudio.microsoft.com/thank-you-downloading-visual-studio/?sku=BuildTools) (using "Visual C++ build tools" workload) or [Visual Studio Community](https://visualstudio.microsoft.com/thank-you-downloading-visual-studio/?sku=Community) (using the "Desktop development with C++" workload) - * Launch cmd, `npm config set msvs_version 2017` If the above steps didn't work for you, please visit [Microsoft's Node.js Guidelines for Windows](https://github.com/Microsoft/nodejs-guidelines/blob/master/windows-environment.md#compiling-native-addon-modules) for additional tips. - To target native ARM64 Node.js on Windows 10 on ARM, add the components "Visual C++ compilers and libraries for ARM64" and "Visual C++ ATL for ARM64". + To target native ARM64 Node.js on Windows on ARM, add the components "Visual C++ compilers and libraries for ARM64" and "Visual C++ ATL for ARM64". + + To use the native ARM64 C++ compiler on Windows on ARM, ensure that you have Visual Studio 2022 [17.4 or later](https://devblogs.microsoft.com/visualstudio/arm64-visual-studio-is-officially-here/) installed. ### Configuring Python Dependency diff --git a/node_modules/npm/node_modules/node-gyp/docs/README.md b/node_modules/npm/node_modules/node-gyp/docs/README.md index 7027960..487fb0a 100644 --- a/node_modules/npm/node_modules/node-gyp/docs/README.md +++ b/node_modules/npm/node_modules/node-gyp/docs/README.md @@ -4,7 +4,15 @@ Please look thru your error log for the string `gyp info using node-gyp@` and if ## `node-sass` is deprecated -Please be aware that the package [`node-sass` is deprecated](https://github.com/sass/node-sass#node-sass) so you should actively seek alternatives. Please avoid opening new `node-sass` issues on this repo. You can try `npm install --global node-sass@latest` but we [cannot help much](https://github.com/nodejs/node-gyp/issues?q=is%3Aissue+label%3A%22Node+Sass+--%3E+Dart+Sass%22+) here. +Please be aware that the package [`node-sass` is deprecated](https://github.com/sass/node-sass#node-sass) so you should actively seek alternatives. You can try: +``` +npm uninstall node-sass +npm install sass --save +# or ... +npm install --global node-sass@latest +``` +`node-sass` projects _may_ work by downgrading to Node.js v14 but [that release is end-of-life](https://github.com/nodejs/release#release-schedule). +But in any case, please avoid opening new `node-sass` issues on this repo because we [cannot help much](https://github.com/nodejs/node-gyp/issues?q=is%3Aissue+label%3A%22Node+Sass+--%3E+Dart+Sass%22+). 
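+For most projects the switch to Dart Sass is a one-line change, because the `sass` package still implements the legacy `render`/`renderSync` API that `node-sass` exposed. A minimal migration sketch, assuming a typical `renderSync` call site (the file paths are placeholders):
+```
+// Dart Sass keeps node-sass's legacy render API, so often only the
+// require needs to change. The .scss/.css paths below are placeholders.
+const sass = require('sass') // previously: require('node-sass')
+const fs = require('fs')
+
+const result = sass.renderSync({ file: 'src/main.scss' })
+fs.writeFileSync('dist/main.css', result.css) // result.css is a Buffer
+```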
## Issues finding the installed Visual Studio diff --git a/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py b/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py index 1ff0dc8..a851b4d 100644 --- a/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py +++ b/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py @@ -24,7 +24,7 @@ import gyp.common import gyp.msvs_emulation import shlex -import xml.etree.cElementTree as ET +import xml.etree.ElementTree as ET generator_wants_static_library_dependencies_adjusted = False diff --git a/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py b/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py index 0e941eb..4e0ec5e 100644 --- a/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py +++ b/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py @@ -2770,7 +2770,7 @@ def __init__(self, properties=None, id=None, parent=None, path=None): self.path = path self._other_pbxprojects = {} # super - return XCContainerPortal.__init__(self, properties, id, parent) + XCContainerPortal.__init__(self, properties, id, parent) def Name(self): name = self.path diff --git a/node_modules/npm/node_modules/node-gyp/lib/find-visualstudio.js b/node_modules/npm/node_modules/node-gyp/lib/find-visualstudio.js index d381511..16f6e79 100644 --- a/node_modules/npm/node_modules/node-gyp/lib/find-visualstudio.js +++ b/node_modules/npm/node_modules/node-gyp/lib/find-visualstudio.js @@ -266,10 +266,15 @@ VisualStudioFinder.prototype = { return {} }, + msBuildPathExists: function msBuildPathExists (path) { + return fs.existsSync(path) + }, + // Helper - process MSBuild information getMSBuild: function getMSBuild (info, versionYear) { const pkg = 'Microsoft.VisualStudio.VC.MSBuild.Base' const msbuildPath = path.join(info.path, 'MSBuild', 'Current', 'Bin', 'MSBuild.exe') + const msbuildPathArm64 = path.join(info.path, 'MSBuild', 'Current', 'Bin', 'arm64', 'MSBuild.exe') if (info.packages.indexOf(pkg) !== -1) { this.log.silly('- found VC.MSBuild.Base') if (versionYear === 2017) { @@ -279,8 +284,14 @@ VisualStudioFinder.prototype = { return msbuildPath } } - // visual studio 2022 don't has msbuild pkg - if (fs.existsSync(msbuildPath)) { + /** + * Visual Studio 2022 doesn't have the MSBuild package. + * Support for compiling _on_ ARM64 was added in MSVC 14.32.31326, + * so let's leverage it if the user has an ARM64 device. + */ + if (process.arch === 'arm64' && this.msBuildPathExists(msbuildPathArm64)) { + return msbuildPathArm64 + } else if (this.msBuildPathExists(msbuildPath)) { return msbuildPath } return null diff --git a/node_modules/npm/node_modules/node-gyp/lib/install.js b/node_modules/npm/node_modules/node-gyp/lib/install.js index 99f6d85..1eb9f14 100644 --- a/node_modules/npm/node_modules/node-gyp/lib/install.js +++ b/node_modules/npm/node_modules/node-gyp/lib/install.js @@ -2,6 +2,8 @@ const fs = require('graceful-fs') const os = require('os') +const { backOff } = require('exponential-backoff') +const rm = require('rimraf') const tar = require('tar') const path = require('path') const util = require('util') @@ -20,6 +22,10 @@ const streamPipeline = util.promisify(stream.pipeline) async function install (fs, gyp, argv) { const release = processRelease(argv, gyp, process.version, process.release) + // Detecting target_arch based on logic from create-config-gypi.js. Used on Windows only. + const arch = win ? 
(gyp.opts.target_arch || gyp.opts.arch || process.arch || 'ia32') : '' + // Used to prevent downloading tarball if only new node.lib is required on Windows. + let shouldDownloadTarball = true // Determine which node dev files version we are installing log.verbose('install', 'input version string %j', release.version) @@ -90,6 +96,26 @@ async function install (fs, gyp, argv) { } } log.verbose('install', 'version is good') + if (win) { + log.verbose('on Windows; need to check node.lib') + const nodeLibPath = path.resolve(devDir, arch, 'node.lib') + try { + await fs.promises.stat(nodeLibPath) + } catch (err) { + if (err.code === 'ENOENT') { + log.verbose('install', `version not already installed for ${arch}, continuing with install`, release.version) + try { + shouldDownloadTarball = false + return await go() + } catch (err) { + return rollback(err) + } + } else if (err.code === 'EACCES') { + return eaccesFallback(err) + } + throw err + } + } } else { try { return await go() @@ -98,15 +124,49 @@ async function install (fs, gyp, argv) { } } + async function copyDirectory (src, dest) { + try { + await fs.promises.stat(src) + } catch { + throw new Error(`Missing source directory for copy: ${src}`) + } + await fs.promises.mkdir(dest, { recursive: true }) + const entries = await fs.promises.readdir(src, { withFileTypes: true }) + for (const entry of entries) { + if (entry.isDirectory()) { + await copyDirectory(path.join(src, entry.name), path.join(dest, entry.name)) + } else if (entry.isFile()) { + // with parallel installs, copying files may cause file errors on + // Windows so use an exponential backoff to resolve collisions + await backOff(async () => { + try { + await fs.promises.copyFile(path.join(src, entry.name), path.join(dest, entry.name)) + } catch (err) { + // if ensure, check if file already exists and that's good enough + if (gyp.opts.ensure && err.code === 'EBUSY') { + try { + await fs.promises.stat(path.join(dest, entry.name)) + return + } catch {} + } + throw err + } + }) + } else { + throw new Error('Unexpected file directory entry type') + } + } + } + async function go () { - log.verbose('ensuring nodedir is created', devDir) + log.verbose('ensuring devDir is created', devDir) // first create the dir for the node dev files try { const created = await fs.promises.mkdir(devDir, { recursive: true }) if (created) { - log.verbose('created nodedir', created) + log.verbose('created devDir', created) } } catch (err) { if (err.code === 'EACCES') { @@ -118,6 +178,7 @@ async function install (fs, gyp, argv) { // now download the node tarball const tarPath = gyp.opts.tarball + let extractErrors = false let extractCount = 0 const contentShasums = {} const expectShasums = {} @@ -136,71 +197,102 @@ async function install (fs, gyp, argv) { return isValid } - // download the tarball and extract! + function onwarn (code, message) { + extractErrors = true + log.error('error while extracting tarball', code, message) + } - if (tarPath) { - await tar.extract({ - file: tarPath, - strip: 1, - filter: isValid, - cwd: devDir - }) - } else { - try { - const res = await download(gyp, release.tarballUrl) + // download the tarball and extract! 
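+ // Note on the retry strategy: backOff in copyDirectory above is invoked
+ // without an options object, so the defaults from the bundled
+ // exponential-backoff dist/options.js apply: numOfAttempts = 10,
+ // startingDelay = 100 ms, timeMultiple = 2, jitter = 'none'. A colliding
+ // copyFile is therefore retried after waits of 100, 200, 400, ... ms and
+ // the last error is rethrown once all ten attempts are exhausted.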
+ // Omitted on Windows if only new node.lib is required - if (res.status !== 200) { - throw new Error(`${res.status} response downloading ${release.tarballUrl}`) - } + // on Windows there can be file errors from tar if parallel installs + // are happening (not uncommon with multiple native modules) so + // extract the tarball to a temp directory first and then copy over + const tarExtractDir = win ? await fs.promises.mkdtemp(path.join(os.tmpdir(), 'node-gyp-tmp-')) : devDir - await streamPipeline( - res.body, - // content checksum - new ShaSum((_, checksum) => { - const filename = path.basename(release.tarballUrl).trim() - contentShasums[filename] = checksum - log.verbose('content checksum', filename, checksum) - }), - tar.extract({ + try { + if (shouldDownloadTarball) { + if (tarPath) { + await tar.extract({ + file: tarPath, strip: 1, - cwd: devDir, - filter: isValid + filter: isValid, + onwarn, + cwd: tarExtractDir }) - ) - } catch (err) { - // something went wrong downloading the tarball? - if (err.code === 'ENOTFOUND') { - throw new Error('This is most likely not a problem with node-gyp or the package itself and\n' + - 'is related to network connectivity. In most cases you are behind a proxy or have bad \n' + - 'network settings.') + } else { + try { + const res = await download(gyp, release.tarballUrl) + + if (res.status !== 200) { + throw new Error(`${res.status} response downloading ${release.tarballUrl}`) + } + + await streamPipeline( + res.body, + // content checksum + new ShaSum((_, checksum) => { + const filename = path.basename(release.tarballUrl).trim() + contentShasums[filename] = checksum + log.verbose('content checksum', filename, checksum) + }), + tar.extract({ + strip: 1, + cwd: tarExtractDir, + filter: isValid, + onwarn + }) + ) + } catch (err) { + // something went wrong downloading the tarball? + if (err.code === 'ENOTFOUND') { + throw new Error('This is most likely not a problem with node-gyp or the package itself and\n' + + 'is related to network connectivity. In most cases you are behind a proxy or have bad \n' + + 'network settings.') + } + throw err + } } - throw err - } - } - // invoked after the tarball has finished being extracted - if (extractCount === 0) { - throw new Error('There was a fatal problem while downloading/extracting the tarball') - } + // invoked after the tarball has finished being extracted + if (extractErrors || extractCount === 0) { + throw new Error('There was a fatal problem while downloading/extracting the tarball') + } - log.verbose('tarball', 'done parsing tarball') + log.verbose('tarball', 'done parsing tarball') + } - const installVersionPath = path.resolve(devDir, 'installVersion') - await Promise.all([ + const installVersionPath = path.resolve(tarExtractDir, 'installVersion') + await Promise.all([ // need to download node.lib - ...(win ? downloadNodeLib() : []), - // write the "installVersion" file - fs.promises.writeFile(installVersionPath, gyp.package.installVersion + '\n'), - // Only download SHASUMS.txt if we downloaded something in need of SHA verification - ...(!tarPath || win ? [downloadShasums()] : []) - ]) - - log.verbose('download contents checksum', JSON.stringify(contentShasums)) - // check content shasums - for (const k in contentShasums) { - log.verbose('validating download checksum for ' + k, '(%s == %s)', contentShasums[k], expectShasums[k]) - if (contentShasums[k] !== expectShasums[k]) { - throw new Error(k + ' local checksum ' + contentShasums[k] + ' not match remote ' + expectShasums[k]) + ...(win ? 
[downloadNodeLib()] : []), + // write the "installVersion" file + fs.promises.writeFile(installVersionPath, gyp.package.installVersion + '\n'), + // Only download SHASUMS.txt if we downloaded something in need of SHA verification + ...(!tarPath || win ? [downloadShasums()] : []) + ]) + + log.verbose('download contents checksum', JSON.stringify(contentShasums)) + // check content shasums + for (const k in contentShasums) { + log.verbose('validating download checksum for ' + k, '(%s == %s)', contentShasums[k], expectShasums[k]) + if (contentShasums[k] !== expectShasums[k]) { + throw new Error(k + ' local checksum ' + contentShasums[k] + ' not match remote ' + expectShasums[k]) + } + } + + // copy over the files from the temp tarball extract directory to devDir + if (tarExtractDir !== devDir) { + await copyDirectory(tarExtractDir, devDir) + } + } finally { + if (tarExtractDir !== devDir) { + try { + // try to cleanup temp dir + await util.promisify(rm)(tarExtractDir) + } catch { + log.warn('failed to clean up temp tarball extract directory') + } } } @@ -228,43 +320,33 @@ async function install (fs, gyp, argv) { log.verbose('checksum data', JSON.stringify(expectShasums)) } - function downloadNodeLib () { + async function downloadNodeLib () { log.verbose('on Windows; need to download `' + release.name + '.lib`...') - const archs = ['ia32', 'x64', 'arm64'] - return archs.map(async (arch) => { - const dir = path.resolve(devDir, arch) - const targetLibPath = path.resolve(dir, release.name + '.lib') - const { libUrl, libPath } = release[arch] - const name = `${arch} ${release.name}.lib` - log.verbose(name, 'dir', dir) - log.verbose(name, 'url', libUrl) - - await fs.promises.mkdir(dir, { recursive: true }) - log.verbose('streaming', name, 'to:', targetLibPath) - - const res = await download(gyp, libUrl) - - if (res.status === 403 || res.status === 404) { - if (arch === 'arm64') { - // Arm64 is a newer platform on Windows and not all node distributions provide it. 
- log.verbose(`${name} was not found in ${libUrl}`) - } else { - log.warn(`${name} was not found in ${libUrl}`) - } - return - } else if (res.status !== 200) { - throw new Error(`${res.status} status code downloading ${name}`) - } + const dir = path.resolve(tarExtractDir, arch) + const targetLibPath = path.resolve(dir, release.name + '.lib') + const { libUrl, libPath } = release[arch] + const name = `${arch} ${release.name}.lib` + log.verbose(name, 'dir', dir) + log.verbose(name, 'url', libUrl) + + await fs.promises.mkdir(dir, { recursive: true }) + log.verbose('streaming', name, 'to:', targetLibPath) + + const res = await download(gyp, libUrl) + + // Since only required node.lib is downloaded throw error if it is not fetched + if (res.status !== 200) { + throw new Error(`${res.status} status code downloading ${name}`) + } - return streamPipeline( - res.body, - new ShaSum((_, checksum) => { - contentShasums[libPath] = checksum - log.verbose('content checksum', libPath, checksum) - }), - fs.createWriteStream(targetLibPath) - ) - }) + return streamPipeline( + res.body, + new ShaSum((_, checksum) => { + contentShasums[libPath] = checksum + log.verbose('content checksum', libPath, checksum) + }), + fs.createWriteStream(targetLibPath) + ) } // downloadNodeLib() } // go() diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md deleted file mode 100644 index 5fc208f..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md +++ /dev/null @@ -1,20 +0,0 @@ - - -ISC License - -Copyright npm, Inc. - -Permission to use, copy, modify, and/or distribute this -software for any purpose with or without fee is hereby -granted, provided that the above copyright notice and this -permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL -WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO -EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, -WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER -TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js deleted file mode 100644 index cb5982f..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js +++ /dev/null @@ -1,20 +0,0 @@ -// given an input that may or may not be an object, return an object that has -// a copy of every defined property listed in 'copy'. 
if the input is not an -// object, assign it to the property named by 'wrap' -const getOptions = (input, { copy, wrap }) => { - const result = {} - - if (input && typeof input === 'object') { - for (const prop of copy) { - if (input[prop] !== undefined) { - result[prop] = input[prop] - } - } - } else { - result[wrap] = input - } - - return result -} - -module.exports = getOptions diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js deleted file mode 100644 index 4d13bc0..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js +++ /dev/null @@ -1,9 +0,0 @@ -const semver = require('semver') - -const satisfies = (range) => { - return semver.satisfies(process.version, range, { includePrerelease: true }) -} - -module.exports = { - satisfies, -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/owner-sync.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/owner-sync.js deleted file mode 100644 index 3704aa6..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/owner-sync.js +++ /dev/null @@ -1,96 +0,0 @@ -const { dirname, resolve } = require('path') -const url = require('url') - -const fs = require('../fs.js') - -// given a path, find the owner of the nearest parent -const find = (path) => { - // if we have no getuid, permissions are irrelevant on this platform - if (!process.getuid) { - return {} - } - - // fs methods accept URL objects with a scheme of file: so we need to unwrap - // those into an actual path string before we can resolve it - const resolved = path != null && path.href && path.origin - ? 
resolve(url.fileURLToPath(path)) - : resolve(path) - - let stat - - try { - stat = fs.lstatSync(resolved) - } finally { - // if we got a stat, return its contents - if (stat) { - return { uid: stat.uid, gid: stat.gid } - } - - // try the parent directory - if (resolved !== dirname(resolved)) { - return find(dirname(resolved)) - } - - // no more parents, never got a stat, just return an empty object - return {} - } -} - -// given a path, uid, and gid update the ownership of the path if necessary -const update = (path, uid, gid) => { - // nothing to update, just exit - if (uid === undefined && gid === undefined) { - return - } - - try { - // see if the permissions are already the same, if they are we don't - // need to do anything, so return early - const stat = fs.statSync(path) - if (uid === stat.uid && gid === stat.gid) { - return - } - } catch { - // ignore errors - } - - try { - fs.chownSync(path, uid, gid) - } catch { - // ignore errors - } -} - -// accepts a `path` and the `owner` property of an options object and normalizes -// it into an object with numerical `uid` and `gid` -const validate = (path, input) => { - let uid - let gid - - if (typeof input === 'string' || typeof input === 'number') { - uid = input - gid = input - } else if (input && typeof input === 'object') { - uid = input.uid - gid = input.gid - } - - if (uid === 'inherit' || gid === 'inherit') { - const owner = find(path) - if (uid === 'inherit') { - uid = owner.uid - } - - if (gid === 'inherit') { - gid = owner.gid - } - } - - return { uid, gid } -} - -module.exports = { - find, - update, - validate, -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/owner.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/owner.js deleted file mode 100644 index 9f02d41..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/owner.js +++ /dev/null @@ -1,96 +0,0 @@ -const { dirname, resolve } = require('path') -const url = require('url') - -const fs = require('../fs.js') - -// given a path, find the owner of the nearest parent -const find = async (path) => { - // if we have no getuid, permissions are irrelevant on this platform - if (!process.getuid) { - return {} - } - - // fs methods accept URL objects with a scheme of file: so we need to unwrap - // those into an actual path string before we can resolve it - const resolved = path != null && path.href && path.origin - ? 
resolve(url.fileURLToPath(path)) - : resolve(path) - - let stat - - try { - stat = await fs.lstat(resolved) - } finally { - // if we got a stat, return its contents - if (stat) { - return { uid: stat.uid, gid: stat.gid } - } - - // try the parent directory - if (resolved !== dirname(resolved)) { - return find(dirname(resolved)) - } - - // no more parents, never got a stat, just return an empty object - return {} - } -} - -// given a path, uid, and gid update the ownership of the path if necessary -const update = async (path, uid, gid) => { - // nothing to update, just exit - if (uid === undefined && gid === undefined) { - return - } - - try { - // see if the permissions are already the same, if they are we don't - // need to do anything, so return early - const stat = await fs.stat(path) - if (uid === stat.uid && gid === stat.gid) { - return - } - } catch { - // ignore errors - } - - try { - await fs.chown(path, uid, gid) - } catch { - // ignore errors - } -} - -// accepts a `path` and the `owner` property of an options object and normalizes -// it into an object with numerical `uid` and `gid` -const validate = async (path, input) => { - let uid - let gid - - if (typeof input === 'string' || typeof input === 'number') { - uid = input - gid = input - } else if (input && typeof input === 'object') { - uid = input.uid - gid = input.gid - } - - if (uid === 'inherit' || gid === 'inherit') { - const owner = await find(path) - if (uid === 'inherit') { - uid = owner.uid - } - - if (gid === 'inherit') { - gid = owner.gid - } - } - - return { uid, gid } -} - -module.exports = { - find, - update, - validate, -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/copy-file.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/copy-file.js deleted file mode 100644 index 8888266..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/copy-file.js +++ /dev/null @@ -1,16 +0,0 @@ -const fs = require('./fs.js') -const getOptions = require('./common/get-options.js') -const withOwner = require('./with-owner.js') - -const copyFile = async (src, dest, opts) => { - const options = getOptions(opts, { - copy: ['mode'], - wrap: 'mode', - }) - - // the node core method as of 16.5.0 does not support the mode being in an - // object, so we have to pass the mode value directly - return withOwner(dest, () => fs.copyFile(src, dest, options.mode), opts) -} - -module.exports = copyFile diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE deleted file mode 100644 index 93546df..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -(The MIT License) - -Copyright (c) 2011-2017 JP Richardson - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files -(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, - merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE -WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS -OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, - ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js deleted file mode 100644 index 5da4739..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js +++ /dev/null @@ -1,22 +0,0 @@ -const fs = require('../fs.js') -const getOptions = require('../common/get-options.js') -const node = require('../common/node.js') -const polyfill = require('./polyfill.js') - -// node 16.7.0 added fs.cp -const useNative = node.satisfies('>=16.7.0') - -const cp = async (src, dest, opts) => { - const options = getOptions(opts, { - copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'], - }) - - // the polyfill is tested separately from this module, no need to hack - // process.version to try to trigger it just for coverage - // istanbul ignore next - return useNative - ? fs.cp(src, dest, options) - : polyfill(src, dest, options) -} - -module.exports = cp diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js deleted file mode 100644 index f83ccbf..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js +++ /dev/null @@ -1,428 +0,0 @@ -// this file is a modified version of the code in node 17.2.0 -// which is, in turn, a modified version of the fs-extra module on npm -// node core changes: -// - Use of the assert module has been replaced with core's error system. -// - All code related to the glob dependency has been removed. -// - Bring your own custom fs module is not currently supported. -// - Some basic code cleanup. 
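The cp/index.js removed just above is a tidy example of the version-gated dispatch npm uses throughout: probe the running Node once, then route every call either to native fs.cp or to the bundled polyfill. Roughly, using the semver package the original also depends on (the fallback body here is a placeholder):

const fs = require('fs/promises')
const semver = require('semver')

// fs.cp landed in node 16.7.0; anything older needs the polyfill
const useNative = semver.satisfies(process.version, '>=16.7.0')

async function cp (src, dest, opts) {
  if (useNative) {
    return fs.cp(src, dest, opts)
  }
  // the deleted module ships a full polyfill here; omitted in this sketch
  throw new Error('fs.cp unavailable on this node version')
}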
-// changes here: -// - remove all callback related code -// - drop sync support -// - change assertions back to non-internal methods (see options.js) -// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows -'use strict' - -const { - ERR_FS_CP_DIR_TO_NON_DIR, - ERR_FS_CP_EEXIST, - ERR_FS_CP_EINVAL, - ERR_FS_CP_FIFO_PIPE, - ERR_FS_CP_NON_DIR_TO_DIR, - ERR_FS_CP_SOCKET, - ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY, - ERR_FS_CP_UNKNOWN, - ERR_FS_EISDIR, - ERR_INVALID_ARG_TYPE, -} = require('../errors.js') -const { - constants: { - errno: { - EEXIST, - EISDIR, - EINVAL, - ENOTDIR, - }, - }, -} = require('os') -const { - chmod, - copyFile, - lstat, - mkdir, - readdir, - readlink, - stat, - symlink, - unlink, - utimes, -} = require('../fs.js') -const { - dirname, - isAbsolute, - join, - parse, - resolve, - sep, - toNamespacedPath, -} = require('path') -const { fileURLToPath } = require('url') - -const defaultOptions = { - dereference: false, - errorOnExist: false, - filter: undefined, - force: true, - preserveTimestamps: false, - recursive: false, -} - -async function cp (src, dest, opts) { - if (opts != null && typeof opts !== 'object') { - throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts) - } - return cpFn( - toNamespacedPath(getValidatedPath(src)), - toNamespacedPath(getValidatedPath(dest)), - { ...defaultOptions, ...opts }) -} - -function getValidatedPath (fileURLOrPath) { - const path = fileURLOrPath != null && fileURLOrPath.href - && fileURLOrPath.origin - ? fileURLToPath(fileURLOrPath) - : fileURLOrPath - return path -} - -async function cpFn (src, dest, opts) { - // Warn about using preserveTimestamps on 32-bit node - // istanbul ignore next - if (opts.preserveTimestamps && process.arch === 'ia32') { - const warning = 'Using the preserveTimestamps option in 32-bit ' + - 'node is not recommended' - process.emitWarning(warning, 'TimestampPrecisionWarning') - } - const stats = await checkPaths(src, dest, opts) - const { srcStat, destStat } = stats - await checkParentPaths(src, srcStat, dest) - if (opts.filter) { - return handleFilter(checkParentDir, destStat, src, dest, opts) - } - return checkParentDir(destStat, src, dest, opts) -} - -async function checkPaths (src, dest, opts) { - const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts) - if (destStat) { - if (areIdentical(srcStat, destStat)) { - throw new ERR_FS_CP_EINVAL({ - message: 'src and dest cannot be the same', - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - if (srcStat.isDirectory() && !destStat.isDirectory()) { - throw new ERR_FS_CP_DIR_TO_NON_DIR({ - message: `cannot overwrite directory ${src} ` + - `with non-directory ${dest}`, - path: dest, - syscall: 'cp', - errno: EISDIR, - }) - } - if (!srcStat.isDirectory() && destStat.isDirectory()) { - throw new ERR_FS_CP_NON_DIR_TO_DIR({ - message: `cannot overwrite non-directory ${src} ` + - `with directory ${dest}`, - path: dest, - syscall: 'cp', - errno: ENOTDIR, - }) - } - } - - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${src} to a subdirectory of self ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return { srcStat, destStat } -} - -function areIdentical (srcStat, destStat) { - return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && - destStat.dev === srcStat.dev -} - -function getStats (src, dest, opts) { - const statFunc = opts.dereference ? 
- (file) => stat(file, { bigint: true }) : - (file) => lstat(file, { bigint: true }) - return Promise.all([ - statFunc(src), - statFunc(dest).catch((err) => { - // istanbul ignore next: unsure how to cover. - if (err.code === 'ENOENT') { - return null - } - // istanbul ignore next: unsure how to cover. - throw err - }), - ]) -} - -async function checkParentDir (destStat, src, dest, opts) { - const destParent = dirname(dest) - const dirExists = await pathExists(destParent) - if (dirExists) { - return getStatsForCopy(destStat, src, dest, opts) - } - await mkdir(destParent, { recursive: true }) - return getStatsForCopy(destStat, src, dest, opts) -} - -function pathExists (dest) { - return stat(dest).then( - () => true, - // istanbul ignore next: not sure when this would occur - (err) => (err.code === 'ENOENT' ? false : Promise.reject(err))) -} - -// Recursively check if dest parent is a subdirectory of src. -// It works for all file types including symlinks since it -// checks the src and dest inodes. It starts from the deepest -// parent and stops once it reaches the src parent or the root path. -async function checkParentPaths (src, srcStat, dest) { - const srcParent = resolve(dirname(src)) - const destParent = resolve(dirname(dest)) - if (destParent === srcParent || destParent === parse(destParent).root) { - return - } - let destStat - try { - destStat = await stat(destParent, { bigint: true }) - } catch (err) { - // istanbul ignore else: not sure when this would occur - if (err.code === 'ENOENT') { - return - } - // istanbul ignore next: not sure when this would occur - throw err - } - if (areIdentical(srcStat, destStat)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${src} to a subdirectory of self ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return checkParentPaths(src, srcStat, destParent) -} - -const normalizePathToArray = (path) => - resolve(path).split(sep).filter(Boolean) - -// Return true if dest is a subdir of src, otherwise false. -// It only checks the path strings. -function isSrcSubdir (src, dest) { - const srcArr = normalizePathToArray(src) - const destArr = normalizePathToArray(dest) - return srcArr.every((cur, i) => destArr[i] === cur) -} - -async function handleFilter (onInclude, destStat, src, dest, opts, cb) { - const include = await opts.filter(src, dest) - if (include) { - return onInclude(destStat, src, dest, opts, cb) - } -} - -function startCopy (destStat, src, dest, opts) { - if (opts.filter) { - return handleFilter(getStatsForCopy, destStat, src, dest, opts) - } - return getStatsForCopy(destStat, src, dest, opts) -} - -async function getStatsForCopy (destStat, src, dest, opts) { - const statFn = opts.dereference ? 
stat : lstat - const srcStat = await statFn(src) - // istanbul ignore else: can't portably test FIFO - if (srcStat.isDirectory() && opts.recursive) { - return onDir(srcStat, destStat, src, dest, opts) - } else if (srcStat.isDirectory()) { - throw new ERR_FS_EISDIR({ - message: `${src} is a directory (not copied)`, - path: src, - syscall: 'cp', - errno: EINVAL, - }) - } else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) { - return onFile(srcStat, destStat, src, dest, opts) - } else if (srcStat.isSymbolicLink()) { - return onLink(destStat, src, dest) - } else if (srcStat.isSocket()) { - throw new ERR_FS_CP_SOCKET({ - message: `cannot copy a socket file: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } else if (srcStat.isFIFO()) { - throw new ERR_FS_CP_FIFO_PIPE({ - message: `cannot copy a FIFO pipe: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - // istanbul ignore next: should be unreachable - throw new ERR_FS_CP_UNKNOWN({ - message: `cannot copy an unknown file type: ${dest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) -} - -function onFile (srcStat, destStat, src, dest, opts) { - if (!destStat) { - return _copyFile(srcStat, src, dest, opts) - } - return mayCopyFile(srcStat, src, dest, opts) -} - -async function mayCopyFile (srcStat, src, dest, opts) { - if (opts.force) { - await unlink(dest) - return _copyFile(srcStat, src, dest, opts) - } else if (opts.errorOnExist) { - throw new ERR_FS_CP_EEXIST({ - message: `${dest} already exists`, - path: dest, - syscall: 'cp', - errno: EEXIST, - }) - } -} - -async function _copyFile (srcStat, src, dest, opts) { - await copyFile(src, dest) - if (opts.preserveTimestamps) { - return handleTimestampsAndMode(srcStat.mode, src, dest) - } - return setDestMode(dest, srcStat.mode) -} - -async function handleTimestampsAndMode (srcMode, src, dest) { - // Make sure the file is writable before setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) { - await makeFileWritable(dest, srcMode) - return setDestTimestampsAndMode(srcMode, src, dest) - } - return setDestTimestampsAndMode(srcMode, src, dest) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode) { - return setDestMode(dest, srcMode | 0o200) -} - -async function setDestTimestampsAndMode (srcMode, src, dest) { - await setDestTimestamps(src, dest) - return setDestMode(dest, srcMode) -} - -function setDestMode (dest, srcMode) { - return chmod(dest, srcMode) -} - -async function setDestTimestamps (src, dest) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - const updatedSrcStat = await stat(src) - return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime) -} - -function onDir (srcStat, destStat, src, dest, opts) { - if (!destStat) { - return mkDirAndCopy(srcStat.mode, src, dest, opts) - } - return copyDir(src, dest, opts) -} - -async function mkDirAndCopy (srcMode, src, dest, opts) { - await mkdir(dest) - await copyDir(src, dest, opts) - return setDestMode(dest, srcMode) -} - -async function copyDir (src, dest, opts) { - const dir = await readdir(src) - for (let i = 0; i < dir.length; i++) { - const item = dir[i] - const srcItem = join(src, item) - const destItem = join(dest, item) - const { destStat } = await checkPaths(srcItem, destItem, opts) 
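One subtlety worth flagging in the copy routines above: preserveTimestamps cannot trust a stat taken before the copy, because reading the source updates its atime, so setDestTimestamps re-stats the source afterwards; and when the mode lacks the owner-write bit, utimes would fail with EPERM, so the destination is made writable first and the mode restored last. A compact sketch of that ordering (illustration only, not the npm internals):

const fs = require('fs/promises')

async function copyPreserving (src, dest) {
  const { mode } = await fs.stat(src)
  await fs.copyFile(src, dest)
  if ((mode & 0o200) === 0) {
    // utimes needs to open the file for writing; grant owner-write first
    await fs.chmod(dest, mode | 0o200)
  }
  // re-stat: the copy itself just disturbed src's atime
  const fresh = await fs.stat(src)
  await fs.utimes(dest, fresh.atime, fresh.mtime)
  await fs.chmod(dest, mode)
}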
- await startCopy(destStat, srcItem, destItem, opts) - } -} - -async function onLink (destStat, src, dest) { - let resolvedSrc = await readlink(src) - if (!isAbsolute(resolvedSrc)) { - resolvedSrc = resolve(dirname(src), resolvedSrc) - } - if (!destStat) { - return symlink(resolvedSrc, dest) - } - let resolvedDest - try { - resolvedDest = await readlink(dest) - } catch (err) { - // Dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. - // istanbul ignore next: can only test on windows - if (err.code === 'EINVAL' || err.code === 'UNKNOWN') { - return symlink(resolvedSrc, dest) - } - // istanbul ignore next: should not be possible - throw err - } - if (!isAbsolute(resolvedDest)) { - resolvedDest = resolve(dirname(dest), resolvedDest) - } - if (isSrcSubdir(resolvedSrc, resolvedDest)) { - throw new ERR_FS_CP_EINVAL({ - message: `cannot copy ${resolvedSrc} to a subdirectory of self ` + - `${resolvedDest}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - // Do not copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. - const srcStat = await stat(src) - if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) { - throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({ - message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`, - path: dest, - syscall: 'cp', - errno: EINVAL, - }) - } - return copyLink(resolvedSrc, dest) -} - -async function copyLink (resolvedSrc, dest) { - await unlink(dest) - return symlink(resolvedSrc, dest) -} - -module.exports = cp diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/errors.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/errors.js deleted file mode 100644 index 1cd1e05..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/errors.js +++ /dev/null @@ -1,129 +0,0 @@ -'use strict' -const { inspect } = require('util') - -// adapted from node's internal/errors -// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js - -// close copy of node's internal SystemError class. -class SystemError { - constructor (code, prefix, context) { - // XXX context.code is undefined in all constructors used in cp/polyfill - // that may be a bug copied from node, maybe the constructor should use - // `code` not `errno`? 
nodejs/node#41104 - let message = `${prefix}: ${context.syscall} returned ` + - `${context.code} (${context.message})` - - if (context.path !== undefined) { - message += ` ${context.path}` - } - if (context.dest !== undefined) { - message += ` => ${context.dest}` - } - - this.code = code - Object.defineProperties(this, { - name: { - value: 'SystemError', - enumerable: false, - writable: true, - configurable: true, - }, - message: { - value: message, - enumerable: false, - writable: true, - configurable: true, - }, - info: { - value: context, - enumerable: true, - configurable: true, - writable: false, - }, - errno: { - get () { - return context.errno - }, - set (value) { - context.errno = value - }, - enumerable: true, - configurable: true, - }, - syscall: { - get () { - return context.syscall - }, - set (value) { - context.syscall = value - }, - enumerable: true, - configurable: true, - }, - }) - - if (context.path !== undefined) { - Object.defineProperty(this, 'path', { - get () { - return context.path - }, - set (value) { - context.path = value - }, - enumerable: true, - configurable: true, - }) - } - - if (context.dest !== undefined) { - Object.defineProperty(this, 'dest', { - get () { - return context.dest - }, - set (value) { - context.dest = value - }, - enumerable: true, - configurable: true, - }) - } - } - - toString () { - return `${this.name} [${this.code}]: ${this.message}` - } - - [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) { - return inspect(this, { - ...ctx, - getters: true, - customInspect: false, - }) - } -} - -function E (code, message) { - module.exports[code] = class NodeError extends SystemError { - constructor (ctx) { - super(code, message, ctx) - } - } -} - -E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory') -E('ERR_FS_CP_EEXIST', 'Target already exists') -E('ERR_FS_CP_EINVAL', 'Invalid src or dest') -E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe') -E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory') -E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file') -E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self') -E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type') -E('ERR_FS_EISDIR', 'Path is a directory') - -module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error { - constructor (name, expected, actual) { - super() - this.code = 'ERR_INVALID_ARG_TYPE' - this.message = `The ${name} argument must be ${expected}. Received ${typeof actual}` - } -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/fs.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/fs.js deleted file mode 100644 index 457da10..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/fs.js +++ /dev/null @@ -1,14 +0,0 @@ -const fs = require('fs') -const promisify = require('@gar/promisify') - -const isLower = (s) => s === s.toLowerCase() && s !== s.toUpperCase() - -const fsSync = Object.fromEntries(Object.entries(fs).filter(([k, v]) => - typeof v === 'function' && (k.endsWith('Sync') || !isLower(k[0])) -)) - -// this module returns the core fs async fns wrapped in a proxy that promisifies -// method calls within the getter. we keep it in a separate module so that the -// overridden methods have a consistent way to get to promisified fs methods -// without creating a circular dependency. 
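The fs.js wrapper whose header comment appears here promisifies the entire callback fs surface in a single pass while leaving the Sync variants and the capitalized constructors (ReadStream, Stats, ...) untouched. The same effect can be sketched with only core modules, no @gar/promisify:

const fs = require('fs')
const { promisify } = require('util')

const isLower = (s) => s === s.toLowerCase() && s !== s.toUpperCase()

const wrapped = {}
for (const [name, value] of Object.entries(fs)) {
  if (typeof value !== 'function') {
    continue
  }
  // keep sync methods and constructors as-is, promisify the rest
  wrapped[name] = (name.endsWith('Sync') || !isLower(name[0]))
    ? value
    : promisify(value)
}

wrapped.stat('.').then((st) => console.log(st.isDirectory()))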
the ctors and sync methods are kept untouched -module.exports = { ...promisify(fs), ...fsSync } diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js deleted file mode 100644 index 3a98648..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js +++ /dev/null @@ -1,12 +0,0 @@ -module.exports = { - ...require('./fs.js'), - copyFile: require('./copy-file.js'), - cp: require('./cp/index.js'), - mkdir: require('./mkdir.js'), - mkdtemp: require('./mkdtemp.js'), - rm: require('./rm/index.js'), - withTempDir: require('./with-temp-dir.js'), - withOwner: require('./with-owner.js'), - withOwnerSync: require('./with-owner-sync.js'), - writeFile: require('./write-file.js'), -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdir.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdir.js deleted file mode 100644 index 098d8d0..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdir.js +++ /dev/null @@ -1,19 +0,0 @@ -const fs = require('./fs.js') -const getOptions = require('./common/get-options.js') -const withOwner = require('./with-owner.js') - -// extends mkdir with the ability to specify an owner of the new dir -const mkdir = async (path, opts) => { - const options = getOptions(opts, { - copy: ['mode', 'recursive'], - wrap: 'mode', - }) - - return withOwner( - path, - () => fs.mkdir(path, options), - opts - ) -} - -module.exports = mkdir diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdtemp.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdtemp.js deleted file mode 100644 index 60b12a7..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/mkdtemp.js +++ /dev/null @@ -1,23 +0,0 @@ -const { dirname, sep } = require('path') - -const fs = require('./fs.js') -const getOptions = require('./common/get-options.js') -const withOwner = require('./with-owner.js') - -const mkdtemp = async (prefix, opts) => { - const options = getOptions(opts, { - copy: ['encoding'], - wrap: 'encoding', - }) - - // mkdtemp relies on the trailing path separator to indicate if it should - // create a directory inside of the prefix. if that's the case then the root - // we infer ownership from is the prefix itself, otherwise it's the dirname - // /tmp -> /tmpABCDEF, infers from / - // /tmp/ -> /tmp/ABCDEF, infers from /tmp - const root = prefix.endsWith(sep) ? 
prefix : dirname(prefix) - - return withOwner(root, () => fs.mkdtemp(prefix, options), opts) -} - -module.exports = mkdtemp diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/index.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/index.js deleted file mode 100644 index cb81fbd..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/index.js +++ /dev/null @@ -1,22 +0,0 @@ -const fs = require('../fs.js') -const getOptions = require('../common/get-options.js') -const node = require('../common/node.js') -const polyfill = require('./polyfill.js') - -// node 14.14.0 added fs.rm, which allows both the force and recursive options -const useNative = node.satisfies('>=14.14.0') - -const rm = async (path, opts) => { - const options = getOptions(opts, { - copy: ['retryDelay', 'maxRetries', 'recursive', 'force'], - }) - - // the polyfill is tested separately from this module, no need to hack - // process.version to try to trigger it just for coverage - // istanbul ignore next - return useNative - ? fs.rm(path, options) - : polyfill(path, options) -} - -module.exports = rm diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/polyfill.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/polyfill.js deleted file mode 100644 index a25c174..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/rm/polyfill.js +++ /dev/null @@ -1,239 +0,0 @@ -// this file is a modified version of the code in node core >=14.14.0 -// which is, in turn, a modified version of the rimraf module on npm -// node core changes: -// - Use of the assert module has been replaced with core's error system. -// - All code related to the glob dependency has been removed. -// - Bring your own custom fs module is not currently supported. -// - Some basic code cleanup. 
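The ownership-root inference in mkdtemp.js above hinges on a real fs.mkdtemp quirk: the six random characters are appended directly to the prefix string, so a trailing separator creates a child of the prefix directory while a bare prefix creates a sibling with a glued-on suffix. A quick demonstration:

const fs = require('fs/promises')
const os = require('os')
const path = require('path')

async function demo () {
  // trailing separator: directory created *inside* the tmpdir
  const inside = await fs.mkdtemp(os.tmpdir() + path.sep)
  // bare prefix: the random suffix is glued onto the name itself
  const sibling = await fs.mkdtemp(path.join(os.tmpdir(), 'demo-'))
  console.log(inside, sibling)
  await fs.rmdir(inside)
  await fs.rmdir(sibling)
}

demo()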
-// changes here: -// - remove all callback related code -// - drop sync support -// - change assertions back to non-internal methods (see options.js) -// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows -const errnos = require('os').constants.errno -const { join } = require('path') -const fs = require('../fs.js') - -// error codes that mean we need to remove contents -const notEmptyCodes = new Set([ - 'ENOTEMPTY', - 'EEXIST', - 'EPERM', -]) - -// error codes we can retry later -const retryCodes = new Set([ - 'EBUSY', - 'EMFILE', - 'ENFILE', - 'ENOTEMPTY', - 'EPERM', -]) - -const isWindows = process.platform === 'win32' - -const defaultOptions = { - retryDelay: 100, - maxRetries: 0, - recursive: false, - force: false, -} - -// this is drastically simplified, but should be roughly equivalent to what -// node core throws -class ERR_FS_EISDIR extends Error { - constructor (path) { - super() - this.info = { - code: 'EISDIR', - message: 'is a directory', - path, - syscall: 'rm', - errno: errnos.EISDIR, - } - this.name = 'SystemError' - this.code = 'ERR_FS_EISDIR' - this.errno = errnos.EISDIR - this.syscall = 'rm' - this.path = path - this.message = `Path is a directory: ${this.syscall} returned ` + - `${this.info.code} (is a directory) ${path}` - } - - toString () { - return `${this.name} [${this.code}]: ${this.message}` - } -} - -class ENOTDIR extends Error { - constructor (path) { - super() - this.name = 'Error' - this.code = 'ENOTDIR' - this.errno = errnos.ENOTDIR - this.syscall = 'rmdir' - this.path = path - this.message = `not a directory, ${this.syscall} '${this.path}'` - } - - toString () { - return `${this.name}: ${this.code}: ${this.message}` - } -} - -// force is passed separately here because we respect it for the first entry -// into rimraf only, any further calls that are spawned as a result (i.e. to -// delete content within the target) will ignore ENOENT errors -const rimraf = async (path, options, isTop = false) => { - const force = isTop ? options.force : true - const stat = await fs.lstat(path) - .catch((err) => { - // we only ignore ENOENT if we're forcing this call - if (err.code === 'ENOENT' && force) { - return - } - - if (isWindows && err.code === 'EPERM') { - return fixEPERM(path, options, err, isTop) - } - - throw err - }) - - // no stat object here means either lstat threw an ENOENT, or lstat threw - // an EPERM and the fixPERM function took care of things. either way, we're - // already done, so return early - if (!stat) { - return - } - - if (stat.isDirectory()) { - return rmdir(path, options, null, isTop) - } - - return fs.unlink(path) - .catch((err) => { - if (err.code === 'ENOENT' && force) { - return - } - - if (err.code === 'EISDIR') { - return rmdir(path, options, err, isTop) - } - - if (err.code === 'EPERM') { - // in windows, we handle this through fixEPERM which will also try to - // delete things again. everywhere else since deleting the target as a - // file didn't work we go ahead and try to delete it as a directory - return isWindows - ? fixEPERM(path, options, err, isTop) - : rmdir(path, options, err, isTop) - } - - throw err - }) -} - -const fixEPERM = async (path, options, originalErr, isTop) => { - const force = isTop ? options.force : true - const targetMissing = await fs.chmod(path, 0o666) - .catch((err) => { - if (err.code === 'ENOENT' && force) { - return true - } - - throw originalErr - }) - - // got an ENOENT above, return now. 
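Further down in this rm polyfill, the exported rm wraps rimraf in a retry loop: error codes regarded as transient (EBUSY, EMFILE, ENFILE, ENOTEMPTY, EPERM) are retried up to maxRetries times with a linearly growing delay of retries * retryDelay. The shape of that loop, extracted as a generic helper (names and defaults here are illustrative):

const delay = (ms) => new Promise((res) => setTimeout(res, ms))

const transient = new Set(['EBUSY', 'EMFILE', 'ENFILE', 'ENOTEMPTY', 'EPERM'])

async function withRetries (fn, { maxRetries = 3, retryDelay = 100 } = {}) {
  for (let tries = 0; ; tries++) {
    try {
      return await fn()
    } catch (err) {
      if (!transient.has(err.code) || tries + 1 >= maxRetries) {
        throw err
      }
      // linear backoff, mirroring retries * options.retryDelay
      await delay((tries + 1) * retryDelay)
    }
  }
}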
no file = no problem - if (targetMissing) { - return - } - - // this function does its own lstat rather than calling rimraf again to avoid - // infinite recursion for a repeating EPERM - const stat = await fs.lstat(path) - .catch((err) => { - if (err.code === 'ENOENT' && force) { - return - } - - throw originalErr - }) - - if (!stat) { - return - } - - if (stat.isDirectory()) { - return rmdir(path, options, originalErr, isTop) - } - - return fs.unlink(path) -} - -const rmdir = async (path, options, originalErr, isTop) => { - if (!options.recursive && isTop) { - throw originalErr || new ERR_FS_EISDIR(path) - } - const force = isTop ? options.force : true - - return fs.rmdir(path) - .catch(async (err) => { - // in Windows, calling rmdir on a file path will fail with ENOENT rather - // than ENOTDIR. to determine if that's what happened, we have to do - // another lstat on the path. if the path isn't actually gone, we throw - // away the ENOENT and replace it with our own ENOTDIR - if (isWindows && err.code === 'ENOENT') { - const stillExists = await fs.lstat(path).then(() => true, () => false) - if (stillExists) { - err = new ENOTDIR(path) - } - } - - // not there, not a problem - if (err.code === 'ENOENT' && force) { - return - } - - // we may not have originalErr if lstat tells us our target is a - // directory but that changes before we actually remove it, so - // only throw it here if it's set - if (originalErr && err.code === 'ENOTDIR') { - throw originalErr - } - - // the directory isn't empty, remove the contents and try again - if (notEmptyCodes.has(err.code)) { - const files = await fs.readdir(path) - await Promise.all(files.map((file) => { - const target = join(path, file) - return rimraf(target, options) - })) - return fs.rmdir(path) - } - - throw err - }) -} - -const rm = async (path, opts) => { - const options = { ...defaultOptions, ...opts } - let retries = 0 - - const errHandler = async (err) => { - if (retryCodes.has(err.code) && ++retries < options.maxRetries) { - const delay = retries * options.retryDelay - await promiseTimeout(delay) - return rimraf(path, options, true).catch(errHandler) - } - - throw err - } - - return rimraf(path, options, true).catch(errHandler) -} - -const promiseTimeout = (ms) => new Promise((r) => setTimeout(r, ms)) - -module.exports = rm diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-owner-sync.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-owner-sync.js deleted file mode 100644 index 3597d1c..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-owner-sync.js +++ /dev/null @@ -1,21 +0,0 @@ -const getOptions = require('./common/get-options.js') -const owner = require('./common/owner-sync.js') - -const withOwnerSync = (path, fn, opts) => { - const options = getOptions(opts, { - copy: ['owner'], - }) - - const { uid, gid } = owner.validate(path, options.owner) - - const result = fn({ uid, gid }) - - owner.update(path, uid, gid) - if (typeof result === 'string') { - owner.update(result, uid, gid) - } - - return result -} - -module.exports = withOwnerSync diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-owner.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-owner.js deleted file mode 100644 index a679102..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-owner.js +++ /dev/null @@ -1,21 +0,0 @@ -const getOptions = require('./common/get-options.js') -const 
owner = require('./common/owner.js') - -const withOwner = async (path, fn, opts) => { - const options = getOptions(opts, { - copy: ['owner'], - }) - - const { uid, gid } = await owner.validate(path, options.owner) - - const result = await fn({ uid, gid }) - - await Promise.all([ - owner.update(path, uid, gid), - typeof result === 'string' ? owner.update(result, uid, gid) : null, - ]) - - return result -} - -module.exports = withOwner diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js deleted file mode 100644 index 81db59d..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js +++ /dev/null @@ -1,41 +0,0 @@ -const { join, sep } = require('path') - -const getOptions = require('./common/get-options.js') -const mkdir = require('./mkdir.js') -const mkdtemp = require('./mkdtemp.js') -const rm = require('./rm/index.js') - -// create a temp directory, ensure its permissions match its parent, then call -// the supplied function passing it the path to the directory. clean up after -// the function finishes, whether it throws or not -const withTempDir = async (root, fn, opts) => { - const options = getOptions(opts, { - copy: ['tmpPrefix'], - }) - // create the directory, and fix its ownership - await mkdir(root, { recursive: true, owner: 'inherit' }) - - const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || ''), { owner: 'inherit' }) - let err - let result - - try { - result = await fn(target) - } catch (_err) { - err = _err - } - - try { - await rm(target, { force: true, recursive: true }) - } catch { - // ignore errors - } - - if (err) { - throw err - } - - return result -} - -module.exports = withTempDir diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/write-file.js b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/write-file.js deleted file mode 100644 index ff90057..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/write-file.js +++ /dev/null @@ -1,14 +0,0 @@ -const fs = require('./fs.js') -const getOptions = require('./common/get-options.js') -const withOwner = require('./with-owner.js') - -const writeFile = async (file, data, opts) => { - const options = getOptions(opts, { - copy: ['encoding', 'mode', 'flag', 'signal'], - wrap: 'encoding', - }) - - return withOwner(file, () => fs.writeFile(file, data, options), opts) -} - -module.exports = writeFile diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/package.json b/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/package.json deleted file mode 100644 index 1512fd6..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/@npmcli/fs/package.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "name": "@npmcli/fs", - "version": "2.1.2", - "description": "filesystem utilities for the npm cli", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "snap": "tap", - "test": "tap", - "npmclilint": "npmcli-lint", - "lint": "eslint \"**/*.js\"", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint", - "postsnap": "npm run lintfix --", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": 
"https://github.com/npm/fs.git" - }, - "keywords": [ - "npm", - "oss" - ], - "author": "GitHub Inc.", - "license": "ISC", - "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.5.0", - "tap": "^16.0.1" - }, - "dependencies": { - "@gar/promisify": "^1.1.3", - "semver": "^7.3.5" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.5.0" - } -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md deleted file mode 100644 index 8d28acf..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js deleted file mode 100644 index ad5a76a..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js +++ /dev/null @@ -1,29 +0,0 @@ -'use strict' - -const contentVer = require('../../package.json')['cache-version'].content -const hashToSegments = require('../util/hash-to-segments') -const path = require('path') -const ssri = require('ssri') - -// Current format of content file path: -// -// sha512-BaSE64Hex= -> -// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee -// -module.exports = contentPath - -function contentPath (cache, integrity) { - const sri = ssri.parse(integrity, { single: true }) - // contentPath is the *strongest* algo given - return path.join( - contentDir(cache), - sri.algorithm, - ...hashToSegments(sri.hexDigest()) - ) -} - -module.exports.contentDir = contentDir - -function contentDir (cache) { - return path.join(cache, `content-v${contentVer}`) -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js deleted file mode 100644 index 7c20c75..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js +++ /dev/null @@ -1,241 +0,0 @@ -'use strict' - -const fs = require('@npmcli/fs') -const fsm = require('fs-minipass') -const ssri = require('ssri') -const contentPath = require('./path') -const Pipeline = require('minipass-pipeline') - -module.exports = read - -const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 -async function read (cache, integrity, opts = {}) { - const { size } = opts - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // get size - const stat = await 
fs.stat(cpath) - return { stat, cpath, sri } - }) - if (typeof size === 'number' && stat.size !== size) { - throw sizeError(size, stat.size) - } - - if (stat.size > MAX_SINGLE_READ_SIZE) { - return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() - } - - const data = await fs.readFile(cpath, { encoding: null }) - if (!ssri.checkData(data, sri)) { - throw integrityError(sri, cpath) - } - - return data -} - -const readPipeline = (cpath, size, sri, stream) => { - stream.push( - new fsm.ReadStream(cpath, { - size, - readSize: MAX_SINGLE_READ_SIZE, - }), - ssri.integrityStream({ - integrity: sri, - size, - }) - ) - return stream -} - -module.exports.sync = readSync - -function readSync (cache, integrity, opts = {}) { - const { size } = opts - return withContentSriSync(cache, integrity, (cpath, sri) => { - const data = fs.readFileSync(cpath, { encoding: null }) - if (typeof size === 'number' && size !== data.length) { - throw sizeError(size, data.length) - } - - if (ssri.checkData(data, sri)) { - return data - } - - throw integrityError(sri, cpath) - }) -} - -module.exports.stream = readStream -module.exports.readStream = readStream - -function readStream (cache, integrity, opts = {}) { - const { size } = opts - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { - // just stat to ensure it exists - const stat = await fs.stat(cpath) - return { stat, cpath, sri } - }) - if (typeof size === 'number' && size !== stat.size) { - return stream.emit('error', sizeError(size, stat.size)) - } - - return readPipeline(cpath, stat.size, sri, stream) - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.copy = copy -module.exports.copy.sync = copySync - -function copy (cache, integrity, dest) { - return withContentSri(cache, integrity, (cpath, sri) => { - return fs.copyFile(cpath, dest) - }) -} - -function copySync (cache, integrity, dest) { - return withContentSriSync(cache, integrity, (cpath, sri) => { - return fs.copyFileSync(cpath, dest) - }) -} - -module.exports.hasContent = hasContent - -async function hasContent (cache, integrity) { - if (!integrity) { - return false - } - - try { - return await withContentSri(cache, integrity, async (cpath, sri) => { - const stat = await fs.stat(cpath) - return { size: stat.size, sri, stat } - }) - } catch (err) { - if (err.code === 'ENOENT') { - return false - } - - if (err.code === 'EPERM') { - /* istanbul ignore else */ - if (process.platform !== 'win32') { - throw err - } else { - return false - } - } - } -} - -module.exports.hasContent.sync = hasContentSync - -function hasContentSync (cache, integrity) { - if (!integrity) { - return false - } - - return withContentSriSync(cache, integrity, (cpath, sri) => { - try { - const stat = fs.statSync(cpath) - return { size: stat.size, sri, stat } - } catch (err) { - if (err.code === 'ENOENT') { - return false - } - - if (err.code === 'EPERM') { - /* istanbul ignore else */ - if (process.platform !== 'win32') { - throw err - } else { - return false - } - } - } - }) -} - -async function withContentSri (cache, integrity, fn) { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. 
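cacache's read path above picks a strategy by size: anything at or under the 64 MiB MAX_SINGLE_READ_SIZE is read into one buffer and verified with ssri.checkData, while larger content is streamed through ssri.integrityStream so it never lives in memory whole. The buffered half, sketched with the same ssri package:

const fs = require('fs/promises')
const ssri = require('ssri')

const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024

async function readVerified (file, integrity) {
  const stat = await fs.stat(file)
  if (stat.size > MAX_SINGLE_READ_SIZE) {
    // the real code falls back to a streaming minipass pipeline here
    throw new Error('too large for the buffered path in this sketch')
  }
  const data = await fs.readFile(file)
  if (!ssri.checkData(data, integrity)) {
    const err = new Error(`integrity verification failed for ${file}`)
    err.code = 'EINTEGRITY'
    throw err
  }
  return data
}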
- const algo = sri.pickAlgorithm() - const digests = sri[algo] - - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - // Can't use race here because a generic error can happen before - // a ENOENT error, and can happen before a valid result - const results = await Promise.all(digests.map(async (meta) => { - try { - return await withContentSri(cache, meta, fn) - } catch (err) { - if (err.code === 'ENOENT') { - return Object.assign( - new Error('No matching content found for ' + sri.toString()), - { code: 'ENOENT' } - ) - } - return err - } - })) - // Return the first non error if it is found - const result = results.find((r) => !(r instanceof Error)) - if (result) { - return result - } - - // Throw the No matching content found error - const enoentError = results.find((r) => r.code === 'ENOENT') - if (enoentError) { - throw enoentError - } - - // Throw generic error - throw results.find((r) => r instanceof Error) - } -} - -function withContentSriSync (cache, integrity, fn) { - const sri = ssri.parse(integrity) - // If `integrity` has multiple entries, pick the first digest - // with available local data. - const algo = sri.pickAlgorithm() - const digests = sri[algo] - if (digests.length <= 1) { - const cpath = contentPath(cache, digests[0]) - return fn(cpath, digests[0]) - } else { - let lastErr = null - for (const meta of digests) { - try { - return withContentSriSync(cache, meta, fn) - } catch (err) { - lastErr = err - } - } - throw lastErr - } -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function integrityError (sri, path) { - const err = new Error(`Integrity verification failed for ${sri} (${path})`) - err.code = 'EINTEGRITY' - err.sri = sri - err.path = path - return err -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js deleted file mode 100644 index f733305..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js +++ /dev/null @@ -1,20 +0,0 @@ -'use strict' - -const util = require('util') - -const contentPath = require('./path') -const { hasContent } = require('./read') -const rimraf = util.promisify(require('rimraf')) - -module.exports = rm - -async function rm (cache, integrity) { - const content = await hasContent(cache, integrity) - // ~pretty~ sure we can't end up with a content lacking sri, but be safe - if (content && content.sri) { - await rimraf(contentPath(cache, content.sri)) - return true - } else { - return false - } -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js deleted file mode 100644 index 0e8c0f4..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js +++ /dev/null @@ -1,189 +0,0 @@ -'use strict' - -const events = require('events') -const util = require('util') - -const contentPath = require('./path') -const fixOwner = require('../util/fix-owner') -const fs = require('@npmcli/fs') -const moveFile = require('../util/move-file') -const Minipass = require('minipass') -const Pipeline = require('minipass-pipeline') -const Flush = 
require('minipass-flush') -const path = require('path') -const rimraf = util.promisify(require('rimraf')) -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') -const fsm = require('fs-minipass') - -module.exports = write - -async function write (cache, data, opts = {}) { - const { algorithms, size, integrity } = opts - if (algorithms && algorithms.length > 1) { - throw new Error('opts.algorithms only supports a single algorithm for now') - } - - if (typeof size === 'number' && data.length !== size) { - throw sizeError(size, data.length) - } - - const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) - if (integrity && !ssri.checkData(data, integrity, opts)) { - throw checksumError(integrity, sri) - } - - const tmp = await makeTmp(cache, opts) - try { - await fs.writeFile(tmp.target, data, { flag: 'wx' }) - await moveToDestination(tmp, cache, sri, opts) - return { integrity: sri, size: data.length } - } finally { - if (!tmp.moved) { - await rimraf(tmp.target) - } - } -} - -module.exports.stream = writeStream - -// writes proxied to the 'inputStream' that is passed to the Promise -// 'end' is deferred until content is handled. -class CacacheWriteStream extends Flush { - constructor (cache, opts) { - super() - this.opts = opts - this.cache = cache - this.inputStream = new Minipass() - this.inputStream.on('error', er => this.emit('error', er)) - this.inputStream.on('drain', () => this.emit('drain')) - this.handleContentP = null - } - - write (chunk, encoding, cb) { - if (!this.handleContentP) { - this.handleContentP = handleContent( - this.inputStream, - this.cache, - this.opts - ) - } - return this.inputStream.write(chunk, encoding, cb) - } - - flush (cb) { - this.inputStream.end(() => { - if (!this.handleContentP) { - const e = new Error('Cache input stream was empty') - e.code = 'ENODATA' - // empty streams are probably emitting end right away. - // defer this one tick by rejecting a promise on it. 
- return Promise.reject(e).catch(cb) - } - // eslint-disable-next-line promise/catch-or-return - this.handleContentP.then( - (res) => { - res.integrity && this.emit('integrity', res.integrity) - // eslint-disable-next-line promise/always-return - res.size !== null && this.emit('size', res.size) - cb() - }, - (er) => cb(er) - ) - }) - } -} - -function writeStream (cache, opts = {}) { - return new CacacheWriteStream(cache, opts) -} - -async function handleContent (inputStream, cache, opts) { - const tmp = await makeTmp(cache, opts) - try { - const res = await pipeToTmp(inputStream, cache, tmp.target, opts) - await moveToDestination( - tmp, - cache, - res.integrity, - opts - ) - return res - } finally { - if (!tmp.moved) { - await rimraf(tmp.target) - } - } -} - -async function pipeToTmp (inputStream, cache, tmpTarget, opts) { - const outStream = new fsm.WriteStream(tmpTarget, { - flags: 'wx', - }) - - if (opts.integrityEmitter) { - // we need to create these all simultaneously since they can fire in any order - const [integrity, size] = await Promise.all([ - events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), - events.once(opts.integrityEmitter, 'size').then(res => res[0]), - new Pipeline(inputStream, outStream).promise(), - ]) - return { integrity, size } - } - - let integrity - let size - const hashStream = ssri.integrityStream({ - integrity: opts.integrity, - algorithms: opts.algorithms, - size: opts.size, - }) - hashStream.on('integrity', i => { - integrity = i - }) - hashStream.on('size', s => { - size = s - }) - - const pipeline = new Pipeline(inputStream, hashStream, outStream) - await pipeline.promise() - return { integrity, size } -} - -async function makeTmp (cache, opts) { - const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await fixOwner.mkdirfix(cache, path.dirname(tmpTarget)) - return { - target: tmpTarget, - moved: false, - } -} - -async function moveToDestination (tmp, cache, sri, opts) { - const destination = contentPath(cache, sri) - const destDir = path.dirname(destination) - - await fixOwner.mkdirfix(cache, destDir) - await moveFile(tmp.target, destination) - tmp.moved = true - await fixOwner.chownr(cache, destination) -} - -function sizeError (expected, found) { - /* eslint-disable-next-line max-len */ - const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) - err.expected = expected - err.found = found - err.code = 'EBADSIZE' - return err -} - -function checksumError (expected, found) { - const err = new Error(`Integrity check failed: - Wanted: ${expected} - Found: ${found}`) - err.code = 'EINTEGRITY' - err.expected = expected - err.found = found - return err -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js deleted file mode 100644 index 1dc73a9..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js +++ /dev/null @@ -1,404 +0,0 @@ -'use strict' - -const util = require('util') -const crypto = require('crypto') -const fs = require('@npmcli/fs') -const Minipass = require('minipass') -const path = require('path') -const ssri = require('ssri') -const uniqueFilename = require('unique-filename') - -const contentPath = require('./content/path') -const fixOwner = require('./util/fix-owner') -const hashToSegments = require('./util/hash-to-segments') -const indexV = require('../package.json')['cache-version'].index 
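Both write paths in content/write.js above follow the same write-then-move discipline: bytes land in a uniquely named temp file opened with the exclusive 'wx' flag, the integrity digest is computed on the way through, and only a fully written file is renamed into its content-addressed location, with the temp file removed on any failure. A simplified sketch of the pattern, using a plain sha512 hex digest as the address:

const crypto = require('crypto')
const fs = require('fs/promises')
const path = require('path')

async function writeContent (dir, data) {
  const tmp = path.join(dir, `tmp-${process.pid}-${Date.now()}`)
  // 'wx' refuses to clobber an existing file of the same name
  await fs.writeFile(tmp, data, { flag: 'wx' })
  try {
    const digest = crypto.createHash('sha512').update(data).digest('hex')
    // shard by digest prefix so no single directory grows unbounded
    const dest = path.join(dir, digest.slice(0, 4), digest.slice(4))
    await fs.mkdir(path.dirname(dest), { recursive: true })
    await fs.rename(tmp, dest)
    return dest
  } catch (err) {
    await fs.rm(tmp, { force: true })
    throw err
  }
}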
-const moveFile = require('@npmcli/move-file') -const _rimraf = require('rimraf') -const rimraf = util.promisify(_rimraf) -rimraf.sync = _rimraf.sync - -module.exports.NotFoundError = class NotFoundError extends Error { - constructor (cache, key) { - super(`No cache entry for ${key} found in ${cache}`) - this.code = 'ENOENT' - this.cache = cache - this.key = key - } -} - -module.exports.compact = compact - -async function compact (cache, key, matchFn, opts = {}) { - const bucket = bucketPath(cache, key) - const entries = await bucketEntries(bucket) - const newEntries = [] - // we loop backwards because the bottom-most result is the newest - // since we add new entries with appendFile - for (let i = entries.length - 1; i >= 0; --i) { - const entry = entries[i] - // a null integrity could mean either a delete was appended - // or the user has simply stored an index that does not map - // to any content. we determine if the user wants to keep the - // null integrity based on the validateEntry function passed in options. - // if the integrity is null and no validateEntry is provided, we break - // as we consider the null integrity to be a deletion of everything - // that came before it. - if (entry.integrity === null && !opts.validateEntry) { - break - } - - // if this entry is valid, and it is either the first entry or - // the newEntries array doesn't already include an entry that - // matches this one based on the provided matchFn, then we add - // it to the beginning of our list - if ((!opts.validateEntry || opts.validateEntry(entry) === true) && - (newEntries.length === 0 || - !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { - newEntries.unshift(entry) - } - } - - const newIndex = '\n' + newEntries.map((entry) => { - const stringified = JSON.stringify(entry) - const hash = hashEntry(stringified) - return `${hash}\t${stringified}` - }).join('\n') - - const setup = async () => { - const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) - await fixOwner.mkdirfix(cache, path.dirname(target)) - return { - target, - moved: false, - } - } - - const teardown = async (tmp) => { - if (!tmp.moved) { - return rimraf(tmp.target) - } - } - - const write = async (tmp) => { - await fs.writeFile(tmp.target, newIndex, { flag: 'wx' }) - await fixOwner.mkdirfix(cache, path.dirname(bucket)) - // we use @npmcli/move-file directly here because we - // want to overwrite the existing file - await moveFile(tmp.target, bucket) - tmp.moved = true - try { - await fixOwner.chownr(cache, bucket) - } catch (err) { - if (err.code !== 'ENOENT') { - throw err - } - } - } - - // write the file atomically - const tmp = await setup() - try { - await write(tmp) - } finally { - await teardown(tmp) - } - - // we reverse the list we generated such that the newest - // entries come first in order to make looping through them easier - // the true passed to formatEntry tells it to keep null - // integrity values, if they made it this far it's because - // validateEntry returned true, and as such we should return it - return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) -} - -module.exports.insert = insert - -async function insert (cache, key, integrity, opts = {}) { - const { metadata, size } = opts - const bucket = bucketPath(cache, key) - const entry = { - key, - integrity: integrity && ssri.stringify(integrity), - time: Date.now(), - size, - metadata, - } - try { - await fixOwner.mkdirfix(cache, path.dirname(bucket)) - const stringified = JSON.stringify(entry) - // NOTE - 
Cleverness ahoy! - // - // This works because it's tremendously unlikely for an entry to corrupt - // another while still preserving the string length of the JSON in - // question. So, we just slap the length in there and verify it on read. - // - // Thanks to @isaacs for the whiteboarding session that ended up with - // this. - await fs.appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - await fixOwner.chownr(cache, bucket) - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - - throw err - // There's a class of race conditions that happen when things get deleted - // during fixOwner, or between the two mkdirfix/chownr calls. - // - // It's perfectly fine to just not bother in those cases and lie - // that the index entry was written. Because it's a cache. - } - return formatEntry(cache, entry) -} - -module.exports.insert.sync = insertSync - -function insertSync (cache, key, integrity, opts = {}) { - const { metadata, size } = opts - const bucket = bucketPath(cache, key) - const entry = { - key, - integrity: integrity && ssri.stringify(integrity), - time: Date.now(), - size, - metadata, - } - fixOwner.mkdirfix.sync(cache, path.dirname(bucket)) - const stringified = JSON.stringify(entry) - fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`) - try { - fixOwner.chownr.sync(cache, bucket) - } catch (err) { - if (err.code !== 'ENOENT') { - throw err - } - } - return formatEntry(cache, entry) -} - -module.exports.find = find - -async function find (cache, key) { - const bucket = bucketPath(cache, key) - try { - const entries = await bucketEntries(bucket) - return entries.reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - } catch (err) { - if (err.code === 'ENOENT') { - return null - } else { - throw err - } - } -} - -module.exports.find.sync = findSync - -function findSync (cache, key) { - const bucket = bucketPath(cache, key) - try { - return bucketEntriesSync(bucket).reduce((latest, next) => { - if (next && next.key === key) { - return formatEntry(cache, next) - } else { - return latest - } - }, null) - } catch (err) { - if (err.code === 'ENOENT') { - return null - } else { - throw err - } - } -} - -module.exports.delete = del - -function del (cache, key, opts = {}) { - if (!opts.removeFully) { - return insert(cache, key, null, opts) - } - - const bucket = bucketPath(cache, key) - return rimraf(bucket) -} - -module.exports.delete.sync = delSync - -function delSync (cache, key, opts = {}) { - if (!opts.removeFully) { - return insertSync(cache, key, null, opts) - } - - const bucket = bucketPath(cache, key) - return rimraf.sync(bucket) -} - -module.exports.lsStream = lsStream - -function lsStream (cache) { - const indexDir = bucketDir(cache) - const stream = new Minipass({ objectMode: true }) - - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const buckets = await readdirOrEmpty(indexDir) - await Promise.all(buckets.map(async (bucket) => { - const bucketPath = path.join(indexDir, bucket) - const subbuckets = await readdirOrEmpty(bucketPath) - await Promise.all(subbuckets.map(async (subbucket) => { - const subbucketPath = path.join(bucketPath, subbucket) - - // "/cachename//./*" - const subbucketEntries = await readdirOrEmpty(subbucketPath) - await Promise.all(subbucketEntries.map(async (entry) => { - const entryPath = path.join(subbucketPath, entry) - try { - const entries = await 
bucketEntries(entryPath) - // using a Map here prevents duplicate keys from showing up - // twice, I guess? - const reduced = entries.reduce((acc, entry) => { - acc.set(entry.key, entry) - return acc - }, new Map()) - // reduced is a map of key => entry - for (const entry of reduced.values()) { - const formatted = formatEntry(cache, entry) - if (formatted) { - stream.write(formatted) - } - } - } catch (err) { - if (err.code === 'ENOENT') { - return undefined - } - throw err - } - })) - })) - })) - stream.end() - return stream - }).catch(err => stream.emit('error', err)) - - return stream -} - -module.exports.ls = ls - -async function ls (cache) { - const entries = await lsStream(cache).collect() - return entries.reduce((acc, xs) => { - acc[xs.key] = xs - return acc - }, {}) -} - -module.exports.bucketEntries = bucketEntries - -async function bucketEntries (bucket, filter) { - const data = await fs.readFile(bucket, 'utf8') - return _bucketEntries(data, filter) -} - -module.exports.bucketEntries.sync = bucketEntriesSync - -function bucketEntriesSync (bucket, filter) { - const data = fs.readFileSync(bucket, 'utf8') - return _bucketEntries(data, filter) -} - -function _bucketEntries (data, filter) { - const entries = [] - data.split('\n').forEach((entry) => { - if (!entry) { - return - } - - const pieces = entry.split('\t') - if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { - // Hash is no good! Corruption or malice? Doesn't matter! - // EJECT EJECT - return - } - let obj - try { - obj = JSON.parse(pieces[1]) - } catch (e) { - // Entry is corrupted! - return - } - if (obj) { - entries.push(obj) - } - }) - return entries -} - -module.exports.bucketDir = bucketDir - -function bucketDir (cache) { - return path.join(cache, `index-v${indexV}`) -} - -module.exports.bucketPath = bucketPath - -function bucketPath (cache, key) { - const hashed = hashKey(key) - return path.join.apply( - path, - [bucketDir(cache)].concat(hashToSegments(hashed)) - ) -} - -module.exports.hashKey = hashKey - -function hashKey (key) { - return hash(key, 'sha256') -} - -module.exports.hashEntry = hashEntry - -function hashEntry (str) { - return hash(str, 'sha1') -} - -function hash (str, digest) { - return crypto - .createHash(digest) - .update(str) - .digest('hex') -} - -function formatEntry (cache, entry, keepAll) { - // Treat null digests as deletions. They'll shadow any previous entries. - if (!entry.integrity && !keepAll) { - return null - } - - return { - key: entry.key, - integrity: entry.integrity, - path: entry.integrity ? 
contentPath(cache, entry.integrity) : undefined, - size: entry.size, - time: entry.time, - metadata: entry.metadata, - } -} - -function readdirOrEmpty (dir) { - return fs.readdir(dir).catch((err) => { - if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { - return [] - } - - throw err - }) -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js deleted file mode 100644 index 254b4ec..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js +++ /dev/null @@ -1,225 +0,0 @@ -'use strict' - -const Collect = require('minipass-collect') -const Minipass = require('minipass') -const Pipeline = require('minipass-pipeline') - -const index = require('./entry-index') -const memo = require('./memoization') -const read = require('./content/read') - -async function getData (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return { - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - } - } - - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - const data = await read(cache, entry.integrity, { integrity, size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return { - data, - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} -module.exports = getData - -async function getDataByDigest (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get.byDigest(cache, key, opts) - if (memoized && memoize !== false) { - return memoized - } - - const res = await read(cache, key, { integrity, size }) - if (memoize) { - memo.put.byDigest(cache, key, res, opts) - } - return res -} -module.exports.byDigest = getDataByDigest - -function getDataSync (cache, key, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get(cache, key, opts) - - if (memoized && memoize !== false) { - return { - metadata: memoized.entry.metadata, - data: memoized.data, - integrity: memoized.entry.integrity, - size: memoized.entry.size, - } - } - const entry = index.find.sync(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - const data = read.sync(cache, entry.integrity, { - integrity: integrity, - size: size, - }) - const res = { - metadata: entry.metadata, - data: data, - size: entry.size, - integrity: entry.integrity, - } - if (memoize) { - memo.put(cache, entry, res.data, opts) - } - - return res -} - -module.exports.sync = getDataSync - -function getDataByDigestSync (cache, digest, opts = {}) { - const { integrity, memoize, size } = opts - const memoized = memo.get.byDigest(cache, digest, opts) - - if (memoized && memoize !== false) { - return memoized - } - - const res = read.sync(cache, digest, { - integrity: integrity, - size: size, - }) - if (memoize) { - memo.put.byDigest(cache, digest, res, opts) - } - - return res -} -module.exports.sync.byDigest = getDataByDigestSync - -const getMemoizedStream = (memoized) => { - const stream = new Minipass() - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(memoized.entry.metadata) - ev === 'integrity' && cb(memoized.entry.integrity) - ev === 'size' && cb(memoized.entry.size) - }) - stream.end(memoized.data) - return stream -} - -function getStream (cache, 
key, opts = {}) { - const { memoize, size } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return getMemoizedStream(memoized) - } - - const stream = new Pipeline() - // Set all this up to run on the stream and then just return the stream - Promise.resolve().then(async () => { - const entry = await index.find(cache, key) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - - stream.emit('metadata', entry.metadata) - stream.emit('integrity', entry.integrity) - stream.emit('size', entry.size) - stream.on('newListener', function (ev, cb) { - ev === 'metadata' && cb(entry.metadata) - ev === 'integrity' && cb(entry.integrity) - ev === 'size' && cb(entry.size) - }) - - const src = read.readStream( - cache, - entry.integrity, - { ...opts, size: typeof size !== 'number' ? entry.size : size } - ) - - if (memoize) { - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put(cache, entry, data, opts)) - stream.unshift(memoStream) - } - stream.unshift(src) - return stream - }).catch((err) => stream.emit('error', err)) - - return stream -} - -module.exports.stream = getStream - -function getStreamDigest (cache, integrity, opts = {}) { - const { memoize } = opts - const memoized = memo.get.byDigest(cache, integrity, opts) - if (memoized && memoize !== false) { - const stream = new Minipass() - stream.end(memoized) - return stream - } else { - const stream = read.readStream(cache, integrity, opts) - if (!memoize) { - return stream - } - - const memoStream = new Collect.PassThrough() - memoStream.on('collect', data => memo.put.byDigest( - cache, - integrity, - data, - opts - )) - return new Pipeline(stream, memoStream) - } -} - -module.exports.stream.byDigest = getStreamDigest - -function info (cache, key, opts = {}) { - const { memoize } = opts - const memoized = memo.get(cache, key, opts) - if (memoized && memoize !== false) { - return Promise.resolve(memoized.entry) - } else { - return index.find(cache, key) - } -} -module.exports.info = info - -async function copy (cache, key, dest, opts = {}) { - const entry = await index.find(cache, key, opts) - if (!entry) { - throw new index.NotFoundError(cache, key) - } - await read.copy(cache, entry.integrity, dest, opts) - return { - metadata: entry.metadata, - size: entry.size, - integrity: entry.integrity, - } -} - -module.exports.copy = copy - -async function copyByDigest (cache, key, dest, opts = {}) { - await read.copy(cache, key, dest, opts) - return key -} - -module.exports.copy.byDigest = copyByDigest - -module.exports.hasContent = read.hasContent diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js deleted file mode 100644 index 1c56be6..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js +++ /dev/null @@ -1,45 +0,0 @@ -'use strict' - -const get = require('./get.js') -const put = require('./put.js') -const rm = require('./rm.js') -const verify = require('./verify.js') -const { clearMemoized } = require('./memoization.js') -const tmp = require('./util/tmp.js') -const index = require('./entry-index.js') - -module.exports.index = {} -module.exports.index.compact = index.compact -module.exports.index.insert = index.insert - -module.exports.ls = index.ls -module.exports.ls.stream = index.lsStream - -module.exports.get = get -module.exports.get.byDigest = get.byDigest -module.exports.get.sync = get.sync 
-module.exports.get.sync.byDigest = get.sync.byDigest -module.exports.get.stream = get.stream -module.exports.get.stream.byDigest = get.stream.byDigest -module.exports.get.copy = get.copy -module.exports.get.copy.byDigest = get.copy.byDigest -module.exports.get.info = get.info -module.exports.get.hasContent = get.hasContent -module.exports.get.hasContent.sync = get.hasContent.sync - -module.exports.put = put -module.exports.put.stream = put.stream - -module.exports.rm = rm.entry -module.exports.rm.all = rm.all -module.exports.rm.entry = module.exports.rm -module.exports.rm.content = rm.content - -module.exports.clearMemoized = clearMemoized - -module.exports.tmp = {} -module.exports.tmp.mkdir = tmp.mkdir -module.exports.tmp.withTmp = tmp.withTmp - -module.exports.verify = verify -module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js deleted file mode 100644 index 0ff604a..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js +++ /dev/null @@ -1,72 +0,0 @@ -'use strict' - -const LRU = require('lru-cache') - -const MEMOIZED = new LRU({ - max: 500, - maxSize: 50 * 1024 * 1024, // 50MB - ttl: 3 * 60 * 1000, // 3 minutes - sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, -}) - -module.exports.clearMemoized = clearMemoized - -function clearMemoized () { - const old = {} - MEMOIZED.forEach((v, k) => { - old[k] = v - }) - MEMOIZED.clear() - return old -} - -module.exports.put = put - -function put (cache, entry, data, opts) { - pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) - putDigest(cache, entry.integrity, data, opts) -} - -module.exports.put.byDigest = putDigest - -function putDigest (cache, integrity, data, opts) { - pickMem(opts).set(`digest:${cache}:${integrity}`, data) -} - -module.exports.get = get - -function get (cache, key, opts) { - return pickMem(opts).get(`key:${cache}:${key}`) -} - -module.exports.get.byDigest = getDigest - -function getDigest (cache, integrity, opts) { - return pickMem(opts).get(`digest:${cache}:${integrity}`) -} - -class ObjProxy { - constructor (obj) { - this.obj = obj - } - - get (key) { - return this.obj[key] - } - - set (key, val) { - this.obj[key] = val - } -} - -function pickMem (opts) { - if (!opts || !opts.memoize) { - return MEMOIZED - } else if (opts.memoize.get && opts.memoize.set) { - return opts.memoize - } else if (typeof opts.memoize === 'object') { - return new ObjProxy(opts.memoize) - } else { - return MEMOIZED - } -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js deleted file mode 100644 index 9fc932d..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js +++ /dev/null @@ -1,80 +0,0 @@ -'use strict' - -const index = require('./entry-index') -const memo = require('./memoization') -const write = require('./content/write') -const Flush = require('minipass-flush') -const { PassThrough } = require('minipass-collect') -const Pipeline = require('minipass-pipeline') - -const putOpts = (opts) => ({ - algorithms: ['sha512'], - ...opts, -}) - -module.exports = putData - -async function putData (cache, key, data, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - const res = await write(cache, data, opts) - const entry = await 
index.insert(cache, key, res.integrity, { ...opts, size: res.size }) - if (memoize) { - memo.put(cache, entry, data, opts) - } - - return res.integrity -} - -module.exports.stream = putStream - -function putStream (cache, key, opts = {}) { - const { memoize } = opts - opts = putOpts(opts) - let integrity - let size - let error - - let memoData - const pipeline = new Pipeline() - // first item in the pipeline is the memoizer, because we need - // that to end first and get the collected data. - if (memoize) { - const memoizer = new PassThrough().on('collect', data => { - memoData = data - }) - pipeline.push(memoizer) - } - - // contentStream is a write-only, not a passthrough - // no data comes out of it. - const contentStream = write.stream(cache, opts) - .on('integrity', (int) => { - integrity = int - }) - .on('size', (s) => { - size = s - }) - .on('error', (err) => { - error = err - }) - - pipeline.push(contentStream) - - // last but not least, we write the index and emit hash and size, - // and memoize if we're doing that - pipeline.push(new Flush({ - async flush () { - if (!error) { - const entry = await index.insert(cache, key, integrity, { ...opts, size }) - if (memoize && memoData) { - memo.put(cache, entry, memoData, opts) - } - pipeline.emit('integrity', integrity) - pipeline.emit('size', size) - } - }, - })) - - return pipeline -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js deleted file mode 100644 index 5f00071..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js +++ /dev/null @@ -1,31 +0,0 @@ -'use strict' - -const util = require('util') - -const index = require('./entry-index') -const memo = require('./memoization') -const path = require('path') -const rimraf = util.promisify(require('rimraf')) -const rmContent = require('./content/rm') - -module.exports = entry -module.exports.entry = entry - -function entry (cache, key, opts) { - memo.clearMemoized() - return index.delete(cache, key, opts) -} - -module.exports.content = content - -function content (cache, integrity) { - memo.clearMemoized() - return rmContent(cache, integrity) -} - -module.exports.all = all - -function all (cache) { - memo.clearMemoized() - return rimraf(path.join(cache, '*(content-*|index-*)')) -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/util/fix-owner.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/util/fix-owner.js deleted file mode 100644 index 182fcb0..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/util/fix-owner.js +++ /dev/null @@ -1,145 +0,0 @@ -'use strict' - -const util = require('util') - -const chownr = util.promisify(require('chownr')) -const mkdirp = require('mkdirp') -const inflight = require('promise-inflight') -const inferOwner = require('infer-owner') - -// Memoize getuid()/getgid() calls. 
-// patch process.setuid/setgid to invalidate cached value on change -const self = { uid: null, gid: null } -const getSelf = () => { - if (typeof self.uid !== 'number') { - self.uid = process.getuid() - const setuid = process.setuid - process.setuid = (uid) => { - self.uid = null - process.setuid = setuid - return process.setuid(uid) - } - } - if (typeof self.gid !== 'number') { - self.gid = process.getgid() - const setgid = process.setgid - process.setgid = (gid) => { - self.gid = null - process.setgid = setgid - return process.setgid(gid) - } - } -} - -module.exports.chownr = fixOwner - -async function fixOwner (cache, filepath) { - if (!process.getuid) { - // This platform doesn't need ownership fixing - return - } - - getSelf() - if (self.uid !== 0) { - // almost certainly can't chown anyway - return - } - - const { uid, gid } = await inferOwner(cache) - - // No need to override if it's already what we used. - if (self.uid === uid && self.gid === gid) { - return - } - - return inflight('fixOwner: fixing ownership on ' + filepath, () => - chownr( - filepath, - typeof uid === 'number' ? uid : self.uid, - typeof gid === 'number' ? gid : self.gid - ).catch((err) => { - if (err.code === 'ENOENT') { - return null - } - - throw err - }) - ) -} - -module.exports.chownr.sync = fixOwnerSync - -function fixOwnerSync (cache, filepath) { - if (!process.getuid) { - // This platform doesn't need ownership fixing - return - } - const { uid, gid } = inferOwner.sync(cache) - getSelf() - if (self.uid !== 0) { - // almost certainly can't chown anyway - return - } - - if (self.uid === uid && self.gid === gid) { - // No need to override if it's already what we used. - return - } - try { - chownr.sync( - filepath, - typeof uid === 'number' ? uid : self.uid, - typeof gid === 'number' ? gid : self.gid - ) - } catch (err) { - // only catch ENOENT, any other error is a problem. - if (err.code === 'ENOENT') { - return null - } - - throw err - } -} - -module.exports.mkdirfix = mkdirfix - -async function mkdirfix (cache, p, cb) { - // we have to infer the owner _before_ making the directory, even though - // we aren't going to use the results, since the cache itself might not - // exist yet. If we mkdirp it, then our current uid/gid will be assumed - // to be correct if it creates the cache folder in the process. 
- await inferOwner(cache) - try { - const made = await mkdirp(p) - if (made) { - await fixOwner(cache, made) - return made - } - } catch (err) { - if (err.code === 'EEXIST') { - await fixOwner(cache, p) - return null - } - throw err - } -} - -module.exports.mkdirfix.sync = mkdirfixSync - -function mkdirfixSync (cache, p) { - try { - inferOwner.sync(cache) - const made = mkdirp.sync(p) - if (made) { - fixOwnerSync(cache, made) - return made - } - } catch (err) { - if (err.code === 'EEXIST') { - fixOwnerSync(cache, p) - return null - } else { - throw err - } - } -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js deleted file mode 100644 index 445599b..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict' - -module.exports = hashToSegments - -function hashToSegments (hash) { - return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/util/move-file.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/util/move-file.js deleted file mode 100644 index a0b4041..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/util/move-file.js +++ /dev/null @@ -1,56 +0,0 @@ -'use strict' - -const fs = require('@npmcli/fs') -const move = require('@npmcli/move-file') -const pinflight = require('promise-inflight') - -module.exports = moveFile - -async function moveFile (src, dest) { - const isWindows = process.platform === 'win32' - - // This isn't quite an fs.rename -- the assumption is that - // if `dest` already exists, and we get certain errors while - // trying to move it, we should just not bother. - // - // In the case of cache corruption, users will receive an - // EINTEGRITY error elsewhere, and can remove the offending - // content their own way. - // - // Note that, as the name suggests, this strictly only supports file moves. - try { - await fs.link(src, dest) - } catch (err) { - if (isWindows && err.code === 'EPERM') { - // XXX This is a really weird way to handle this situation, as it - // results in the src file being deleted even though the dest - // might not exist. Since we pretty much always write files to - // deterministic locations based on content hash, this is likely - // ok (or at worst, just ends in a future cache miss). But it would - // be worth investigating at some time in the future if this is - // really what we want to do here. - } else if (err.code === 'EEXIST' || err.code === 'EBUSY') { - // file already exists, so whatever - } else { - throw err - } - } - try { - await Promise.all([ - fs.unlink(src), - !isWindows && fs.chmod(dest, '0444'), - ]) - } catch (e) { - return pinflight('cacache-move-file:' + dest, async () => { - await fs.stat(dest).catch((err) => { - if (err.code !== 'ENOENT') { - // Something else is wrong here. Bail bail bail - throw err - } - }) - // file doesn't already exist! 
let's try a rename -> copy fallback - // only delete if it successfully copies - return move(src, dest) - }) - } -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js deleted file mode 100644 index b4437cf..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js +++ /dev/null @@ -1,33 +0,0 @@ -'use strict' - -const fs = require('@npmcli/fs') - -const fixOwner = require('./fix-owner') -const path = require('path') - -module.exports.mkdir = mktmpdir - -async function mktmpdir (cache, opts = {}) { - const { tmpPrefix } = opts - const tmpDir = path.join(cache, 'tmp') - await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) - // do not use path.join(), it drops the trailing / if tmpPrefix is unset - const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` - return fs.mkdtemp(target, { owner: 'inherit' }) -} - -module.exports.withTmp = withTmp - -function withTmp (cache, opts, cb) { - if (!cb) { - cb = opts - opts = {} - } - return fs.withTempDir(path.join(cache, 'tmp'), cb, opts) -} - -module.exports.fix = fixtmpdir - -function fixtmpdir (cache) { - return fixOwner(cache, path.join(cache, 'tmp')) -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js deleted file mode 100644 index 52692a0..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js +++ /dev/null @@ -1,257 +0,0 @@ -'use strict' - -const util = require('util') - -const pMap = require('p-map') -const contentPath = require('./content/path') -const fixOwner = require('./util/fix-owner') -const fs = require('@npmcli/fs') -const fsm = require('fs-minipass') -const glob = util.promisify(require('glob')) -const index = require('./entry-index') -const path = require('path') -const rimraf = util.promisify(require('rimraf')) -const ssri = require('ssri') - -const globify = pattern => pattern.split('\\').join('/') - -const hasOwnProperty = (obj, key) => - Object.prototype.hasOwnProperty.call(obj, key) - -const verifyOpts = (opts) => ({ - concurrency: 20, - log: { silly () {} }, - ...opts, -}) - -module.exports = verify - -async function verify (cache, opts) { - opts = verifyOpts(opts) - opts.log.silly('verify', 'verifying cache at', cache) - - const steps = [ - markStartTime, - fixPerms, - garbageCollect, - rebuildIndex, - cleanTmp, - writeVerifile, - markEndTime, - ] - - const stats = {} - for (const step of steps) { - const label = step.name - const start = new Date() - const s = await step(cache, opts) - if (s) { - Object.keys(s).forEach((k) => { - stats[k] = s[k] - }) - } - const end = new Date() - if (!stats.runTime) { - stats.runTime = {} - } - stats.runTime[label] = end - start - } - stats.runTime.total = stats.endTime - stats.startTime - opts.log.silly( - 'verify', - 'verification finished for', - cache, - 'in', - `${stats.runTime.total}ms` - ) - return stats -} - -async function markStartTime (cache, opts) { - return { startTime: new Date() } -} - -async function markEndTime (cache, opts) { - return { endTime: new Date() } -} - -async function fixPerms (cache, opts) { - opts.log.silly('verify', 'fixing cache permissions') - await fixOwner.mkdirfix(cache, cache) - // TODO - fix file permissions too - await fixOwner.chownr(cache, cache) - return null -} - -// Implements a naive mark-and-sweep tracing garbage collector. 
-// -// The algorithm is basically as follows: -// 1. Read (and filter) all index entries ("pointers") -// 2. Mark each integrity value as "live" -// 3. Read entire filesystem tree in `content-vX/` dir -// 4. If content is live, verify its checksum and delete it if it fails -// 5. If content is not marked as live, rimraf it. -// -async function garbageCollect (cache, opts) { - opts.log.silly('verify', 'garbage collecting content') - const indexStream = index.lsStream(cache) - const liveContent = new Set() - indexStream.on('data', (entry) => { - if (opts.filter && !opts.filter(entry)) { - return - } - - liveContent.add(entry.integrity.toString()) - }) - await new Promise((resolve, reject) => { - indexStream.on('end', resolve).on('error', reject) - }) - const contentDir = contentPath.contentDir(cache) - const files = await glob(globify(path.join(contentDir, '**')), { - follow: false, - nodir: true, - nosort: true, - }) - const stats = { - verifiedContent: 0, - reclaimedCount: 0, - reclaimedSize: 0, - badContentCount: 0, - keptSize: 0, - } - await pMap( - files, - async (f) => { - const split = f.split(/[/\\]/) - const digest = split.slice(split.length - 3).join('') - const algo = split[split.length - 4] - const integrity = ssri.fromHex(digest, algo) - if (liveContent.has(integrity.toString())) { - const info = await verifyContent(f, integrity) - if (!info.valid) { - stats.reclaimedCount++ - stats.badContentCount++ - stats.reclaimedSize += info.size - } else { - stats.verifiedContent++ - stats.keptSize += info.size - } - } else { - // No entries refer to this content. We can delete. - stats.reclaimedCount++ - const s = await fs.stat(f) - await rimraf(f) - stats.reclaimedSize += s.size - } - return stats - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function verifyContent (filepath, sri) { - const contentInfo = {} - try { - const { size } = await fs.stat(filepath) - contentInfo.size = size - contentInfo.valid = true - await ssri.checkStream(new fsm.ReadStream(filepath), sri) - } catch (err) { - if (err.code === 'ENOENT') { - return { size: 0, valid: false } - } - if (err.code !== 'EINTEGRITY') { - throw err - } - - await rimraf(filepath) - contentInfo.valid = false - } - return contentInfo -} - -async function rebuildIndex (cache, opts) { - opts.log.silly('verify', 'rebuilding index') - const entries = await index.ls(cache) - const stats = { - missingContent: 0, - rejectedEntries: 0, - totalEntries: 0, - } - const buckets = {} - for (const k in entries) { - /* istanbul ignore else */ - if (hasOwnProperty(entries, k)) { - const hashed = index.hashKey(k) - const entry = entries[k] - const excluded = opts.filter && !opts.filter(entry) - excluded && stats.rejectedEntries++ - if (buckets[hashed] && !excluded) { - buckets[hashed].push(entry) - } else if (buckets[hashed] && excluded) { - // skip - } else if (excluded) { - buckets[hashed] = [] - buckets[hashed]._path = index.bucketPath(cache, k) - } else { - buckets[hashed] = [entry] - buckets[hashed]._path = index.bucketPath(cache, k) - } - } - } - await pMap( - Object.keys(buckets), - (key) => { - return rebuildBucket(cache, buckets[key], stats, opts) - }, - { concurrency: opts.concurrency } - ) - return stats -} - -async function rebuildBucket (cache, bucket, stats, opts) { - await fs.truncate(bucket._path) - // This needs to be serialized because cacache explicitly - // lets very racy bucket conflicts clobber each other. 
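// Note on the serialization above: each index.insert() call below appends one
// hash-prefixed line to the bucket file just truncated, so awaiting the
// inserts one at a time keeps the rebuilt bucket's appends from interleaving.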
- for (const entry of bucket) { - const content = contentPath(cache, entry.integrity) - try { - await fs.stat(content) - await index.insert(cache, entry.key, entry.integrity, { - metadata: entry.metadata, - size: entry.size, - }) - stats.totalEntries++ - } catch (err) { - if (err.code === 'ENOENT') { - stats.rejectedEntries++ - stats.missingContent++ - } else { - throw err - } - } - } -} - -function cleanTmp (cache, opts) { - opts.log.silly('verify', 'cleaning tmp directory') - return rimraf(path.join(cache, 'tmp')) -} - -function writeVerifile (cache, opts) { - const verifile = path.join(cache, '_lastverified') - opts.log.silly('verify', 'writing verifile to ' + verifile) - try { - return fs.writeFile(verifile, `${Date.now()}`) - } finally { - fixOwner.chownr.sync(cache, verifile) - } -} - -module.exports.lastRun = lastRun - -async function lastRun (cache) { - const data = await fs.readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) - return new Date(+data) -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE deleted file mode 100644 index de32266..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2013 Julian Gruber - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js deleted file mode 100644 index 4af9dde..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js +++ /dev/null @@ -1,203 +0,0 @@ -var balanced = require('balanced-match'); - -module.exports = expandTop; - -var escSlash = '\0SLASH'+Math.random()+'\0'; -var escOpen = '\0OPEN'+Math.random()+'\0'; -var escClose = '\0CLOSE'+Math.random()+'\0'; -var escComma = '\0COMMA'+Math.random()+'\0'; -var escPeriod = '\0PERIOD'+Math.random()+'\0'; - -function numeric(str) { - return parseInt(str, 10) == str - ? 
parseInt(str, 10) - : str.charCodeAt(0); -} - -function escapeBraces(str) { - return str.split('\\\\').join(escSlash) - .split('\\{').join(escOpen) - .split('\\}').join(escClose) - .split('\\,').join(escComma) - .split('\\.').join(escPeriod); -} - -function unescapeBraces(str) { - return str.split(escSlash).join('\\') - .split(escOpen).join('{') - .split(escClose).join('}') - .split(escComma).join(',') - .split(escPeriod).join('.'); -} - - -// Basically just str.split(","), but handling cases -// where we have nested braced sections, which should be -// treated as individual members, like {a,{b,c},d} -function parseCommaParts(str) { - if (!str) - return ['']; - - var parts = []; - var m = balanced('{', '}', str); - - if (!m) - return str.split(','); - - var pre = m.pre; - var body = m.body; - var post = m.post; - var p = pre.split(','); - - p[p.length-1] += '{' + body + '}'; - var postParts = parseCommaParts(post); - if (post.length) { - p[p.length-1] += postParts.shift(); - p.push.apply(p, postParts); - } - - parts.push.apply(parts, p); - - return parts; -} - -function expandTop(str) { - if (!str) - return []; - - // I don't know why Bash 4.3 does this, but it does. - // Anything starting with {} will have the first two bytes preserved - // but *only* at the top level, so {},a}b will not expand to anything, - // but a{},b}c will be expanded to [a}c,abc]. - // One could argue that this is a bug in Bash, but since the goal of - // this module is to match Bash's rules, we escape a leading {} - if (str.substr(0, 2) === '{}') { - str = '\\{\\}' + str.substr(2); - } - - return expand(escapeBraces(str), true).map(unescapeBraces); -} - -function embrace(str) { - return '{' + str + '}'; -} -function isPadded(el) { - return /^-?0\d/.test(el); -} - -function lte(i, y) { - return i <= y; -} -function gte(i, y) { - return i >= y; -} - -function expand(str, isTop) { - var expansions = []; - - var m = balanced('{', '}', str); - if (!m) return [str]; - - // no need to expand pre, since it is guaranteed to be free of brace-sets - var pre = m.pre; - var post = m.post.length - ? expand(m.post, false) - : ['']; - - if (/\$$/.test(m.pre)) { - for (var k = 0; k < post.length; k++) { - var expansion = pre+ '{' + m.body + '}' + post[k]; - expansions.push(expansion); - } - } else { - var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); - var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); - var isSequence = isNumericSequence || isAlphaSequence; - var isOptions = m.body.indexOf(',') >= 0; - if (!isSequence && !isOptions) { - // {a},b} - if (m.post.match(/,.*\}/)) { - str = m.pre + '{' + m.body + escClose + m.post; - return expand(str); - } - return [str]; - } - - var n; - if (isSequence) { - n = m.body.split(/\.\./); - } else { - n = parseCommaParts(m.body); - if (n.length === 1) { - // x{{a,b}}y ==> x{a}y x{b}y - n = expand(n[0], false).map(embrace); - if (n.length === 1) { - return post.map(function(p) { - return m.pre + n[0] + p; - }); - } - } - } - - // at this point, n is the parts, and we know it's not a comma set - // with a single entry. - var N; - - if (isSequence) { - var x = numeric(n[0]); - var y = numeric(n[1]); - var width = Math.max(n[0].length, n[1].length) - var incr = n.length == 3 - ? 
Math.abs(numeric(n[2])) - : 1; - var test = lte; - var reverse = y < x; - if (reverse) { - incr *= -1; - test = gte; - } - var pad = n.some(isPadded); - - N = []; - - for (var i = x; test(i, y); i += incr) { - var c; - if (isAlphaSequence) { - c = String.fromCharCode(i); - if (c === '\\') - c = ''; - } else { - c = String(i); - if (pad) { - var need = width - c.length; - if (need > 0) { - var z = new Array(need + 1).join('0'); - if (i < 0) - c = '-' + z + c.slice(1); - else - c = z + c; - } - } - } - N.push(c); - } - } else { - N = []; - - for (var j = 0; j < n.length; j++) { - N.push.apply(N, expand(n[j], false)); - } - } - - for (var j = 0; j < N.length; j++) { - for (var k = 0; k < post.length; k++) { - var expansion = pre + N[j] + post[k]; - if (!isTop || isSequence || expansion) - expansions.push(expansion); - } - } - } - - return expansions; -} - diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json deleted file mode 100644 index 7097d41..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "name": "brace-expansion", - "description": "Brace expansion as known from sh/bash", - "version": "2.0.1", - "repository": { - "type": "git", - "url": "git://github.com/juliangruber/brace-expansion.git" - }, - "homepage": "https://github.com/juliangruber/brace-expansion", - "main": "index.js", - "scripts": { - "test": "tape test/*.js", - "gentest": "bash test/generate.sh", - "bench": "matcha test/perf/bench.js" - }, - "dependencies": { - "balanced-match": "^1.0.0" - }, - "devDependencies": { - "@c4312/matcha": "^1.3.1", - "tape": "^4.6.0" - }, - "keywords": [], - "author": { - "name": "Julian Gruber", - "email": "mail@juliangruber.com", - "url": "http://juliangruber.com" - }, - "license": "MIT", - "testling": { - "files": "test/*.js", - "browsers": [ - "ie/8..latest", - "firefox/20..latest", - "firefox/nightly", - "chrome/25..latest", - "chrome/canary", - "opera/12..latest", - "opera/next", - "safari/5.1..latest", - "ipad/6.0..latest", - "iphone/6.0..latest", - "android-browser/4.2..latest" - ] - } -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE deleted file mode 100644 index 39e8fe1..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2009-2022 Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
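For context before the remaining glob hunks: the sources deleted below implement glob@8's public API (module.exports = glob, glob.sync, glob.hasMagic). A minimal usage sketch of that API follows; the pattern, options, and paths are illustrative assumptions, not anything taken from this change:

'use strict'

const glob = require('glob')

// Async form: glob(pattern, options, cb). The pattern and option values here
// are made-up examples; nodir and ignore are real options handled in common.js.
glob('src/**/*.js', { nodir: true, ignore: ['**/fixtures/**'] }, (er, files) => {
  if (er) throw er
  console.log(files) // matches, deduped and (unless nosort is set) sorted
})

// Sync form, exported from sync.js as glob.sync.
const jsFiles = glob.sync('src/**/*.js', { nodir: true })
console.log(jsFiles.length)

// glob.hasMagic reports whether a pattern contains glob metacharacters,
// letting callers skip the directory walk entirely for literal paths.
if (!glob.hasMagic('README.md')) {
  // plain path: a direct fs.stat would do
}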
diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/common.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/common.js deleted file mode 100644 index 61a4452..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/common.js +++ /dev/null @@ -1,244 +0,0 @@ -exports.setopts = setopts -exports.ownProp = ownProp -exports.makeAbs = makeAbs -exports.finish = finish -exports.mark = mark -exports.isIgnored = isIgnored -exports.childrenIgnored = childrenIgnored - -function ownProp (obj, field) { - return Object.prototype.hasOwnProperty.call(obj, field) -} - -var fs = require("fs") -var path = require("path") -var minimatch = require("minimatch") -var isAbsolute = require("path").isAbsolute -var Minimatch = minimatch.Minimatch - -function alphasort (a, b) { - return a.localeCompare(b, 'en') -} - -function setupIgnores (self, options) { - self.ignore = options.ignore || [] - - if (!Array.isArray(self.ignore)) - self.ignore = [self.ignore] - - if (self.ignore.length) { - self.ignore = self.ignore.map(ignoreMap) - } -} - -// ignore patterns are always in dot:true mode. -function ignoreMap (pattern) { - var gmatcher = null - if (pattern.slice(-3) === '/**') { - var gpattern = pattern.replace(/(\/\*\*)+$/, '') - gmatcher = new Minimatch(gpattern, { dot: true }) - } - - return { - matcher: new Minimatch(pattern, { dot: true }), - gmatcher: gmatcher - } -} - -function setopts (self, pattern, options) { - if (!options) - options = {} - - // base-matching: just use globstar for that. - if (options.matchBase && -1 === pattern.indexOf("/")) { - if (options.noglobstar) { - throw new Error("base matching requires globstar") - } - pattern = "**/" + pattern - } - - self.windowsPathsNoEscape = !!options.windowsPathsNoEscape || - options.allowWindowsEscape === false - if (self.windowsPathsNoEscape) { - pattern = pattern.replace(/\\/g, '/') - } - - self.silent = !!options.silent - self.pattern = pattern - self.strict = options.strict !== false - self.realpath = !!options.realpath - self.realpathCache = options.realpathCache || Object.create(null) - self.follow = !!options.follow - self.dot = !!options.dot - self.mark = !!options.mark - self.nodir = !!options.nodir - if (self.nodir) - self.mark = true - self.sync = !!options.sync - self.nounique = !!options.nounique - self.nonull = !!options.nonull - self.nosort = !!options.nosort - self.nocase = !!options.nocase - self.stat = !!options.stat - self.noprocess = !!options.noprocess - self.absolute = !!options.absolute - self.fs = options.fs || fs - - self.maxLength = options.maxLength || Infinity - self.cache = options.cache || Object.create(null) - self.statCache = options.statCache || Object.create(null) - self.symlinks = options.symlinks || Object.create(null) - - setupIgnores(self, options) - - self.changedCwd = false - var cwd = process.cwd() - if (!ownProp(options, "cwd")) - self.cwd = path.resolve(cwd) - else { - self.cwd = path.resolve(options.cwd) - self.changedCwd = self.cwd !== cwd - } - - self.root = options.root || path.resolve(self.cwd, "/") - self.root = path.resolve(self.root) - - // TODO: is an absolute `cwd` supposed to be resolved against `root`? - // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') - self.cwdAbs = isAbsolute(self.cwd) ? 
self.cwd : makeAbs(self, self.cwd) - self.nomount = !!options.nomount - - if (process.platform === "win32") { - self.root = self.root.replace(/\\/g, "/") - self.cwd = self.cwd.replace(/\\/g, "/") - self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") - } - - // disable comments and negation in Minimatch. - // Note that they are not supported in Glob itself anyway. - options.nonegate = true - options.nocomment = true - - self.minimatch = new Minimatch(pattern, options) - self.options = self.minimatch.options -} - -function finish (self) { - var nou = self.nounique - var all = nou ? [] : Object.create(null) - - for (var i = 0, l = self.matches.length; i < l; i ++) { - var matches = self.matches[i] - if (!matches || Object.keys(matches).length === 0) { - if (self.nonull) { - // do like the shell, and spit out the literal glob - var literal = self.minimatch.globSet[i] - if (nou) - all.push(literal) - else - all[literal] = true - } - } else { - // had matches - var m = Object.keys(matches) - if (nou) - all.push.apply(all, m) - else - m.forEach(function (m) { - all[m] = true - }) - } - } - - if (!nou) - all = Object.keys(all) - - if (!self.nosort) - all = all.sort(alphasort) - - // at *some* point we statted all of these - if (self.mark) { - for (var i = 0; i < all.length; i++) { - all[i] = self._mark(all[i]) - } - if (self.nodir) { - all = all.filter(function (e) { - var notDir = !(/\/$/.test(e)) - var c = self.cache[e] || self.cache[makeAbs(self, e)] - if (notDir && c) - notDir = c !== 'DIR' && !Array.isArray(c) - return notDir - }) - } - } - - if (self.ignore.length) - all = all.filter(function(m) { - return !isIgnored(self, m) - }) - - self.found = all -} - -function mark (self, p) { - var abs = makeAbs(self, p) - var c = self.cache[abs] - var m = p - if (c) { - var isDir = c === 'DIR' || Array.isArray(c) - var slash = p.slice(-1) === '/' - - if (isDir && !slash) - m += '/' - else if (!isDir && slash) - m = m.slice(0, -1) - - if (m !== p) { - var mabs = makeAbs(self, m) - self.statCache[mabs] = self.statCache[abs] - self.cache[mabs] = self.cache[abs] - } - } - - return m -} - -// lotta situps... -function makeAbs (self, f) { - var abs = f - if (f.charAt(0) === '/') { - abs = path.join(self.root, f) - } else if (isAbsolute(f) || f === '') { - abs = f - } else if (self.changedCwd) { - abs = path.resolve(self.cwd, f) - } else { - abs = path.resolve(f) - } - - if (process.platform === 'win32') - abs = abs.replace(/\\/g, '/') - - return abs -} - - -// Return true, if pattern ends with globstar '**', for the accompanying parent directory. -// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents -function isIgnored (self, path) { - if (!self.ignore.length) - return false - - return self.ignore.some(function(item) { - return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) - }) -} - -function childrenIgnored (self, path) { - if (!self.ignore.length) - return false - - return self.ignore.some(function(item) { - return !!(item.gmatcher && item.gmatcher.match(path)) - }) -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/glob.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/glob.js deleted file mode 100644 index 2112a95..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/glob.js +++ /dev/null @@ -1,790 +0,0 @@ -// Approach: -// -// 1. Get the minimatch set -// 2. For each pattern in the set, PROCESS(pattern, false) -// 3. 
Store matches per-set, then uniq them -// -// PROCESS(pattern, inGlobStar) -// Get the first [n] items from pattern that are all strings -// Join these together. This is PREFIX. -// If there is no more remaining, then stat(PREFIX) and -// add to matches if it succeeds. END. -// -// If inGlobStar and PREFIX is symlink and points to dir -// set ENTRIES = [] -// else readdir(PREFIX) as ENTRIES -// If fail, END -// -// with ENTRIES -// If pattern[n] is GLOBSTAR -// // handle the case where the globstar match is empty -// // by pruning it out, and testing the resulting pattern -// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) -// // handle other cases. -// for ENTRY in ENTRIES (not dotfiles) -// // attach globstar + tail onto the entry -// // Mark that this entry is a globstar match -// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) -// -// else // not globstar -// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) -// Test ENTRY against pattern[n] -// If fails, continue -// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) -// -// Caveat: -// Cache all stats and readdirs results to minimize syscall. Since all -// we ever care about is existence and directory-ness, we can just keep -// `true` for files, and [children,...] for directories, or `false` for -// things that don't exist. - -module.exports = glob - -var rp = require('fs.realpath') -var minimatch = require('minimatch') -var Minimatch = minimatch.Minimatch -var inherits = require('inherits') -var EE = require('events').EventEmitter -var path = require('path') -var assert = require('assert') -var isAbsolute = require('path').isAbsolute -var globSync = require('./sync.js') -var common = require('./common.js') -var setopts = common.setopts -var ownProp = common.ownProp -var inflight = require('inflight') -var util = require('util') -var childrenIgnored = common.childrenIgnored -var isIgnored = common.isIgnored - -var once = require('once') - -function glob (pattern, options, cb) { - if (typeof options === 'function') cb = options, options = {} - if (!options) options = {} - - if (options.sync) { - if (cb) - throw new TypeError('callback provided to sync glob') - return globSync(pattern, options) - } - - return new Glob(pattern, options, cb) -} - -glob.sync = globSync -var GlobSync = glob.GlobSync = globSync.GlobSync - -// old api surface -glob.glob = glob - -function extend (origin, add) { - if (add === null || typeof add !== 'object') { - return origin - } - - var keys = Object.keys(add) - var i = keys.length - while (i--) { - origin[keys[i]] = add[keys[i]] - } - return origin -} - -glob.hasMagic = function (pattern, options_) { - var options = extend({}, options_) - options.noprocess = true - - var g = new Glob(pattern, options) - var set = g.minimatch.set - - if (!pattern) - return false - - if (set.length > 1) - return true - - for (var j = 0; j < set[0].length; j++) { - if (typeof set[0][j] !== 'string') - return true - } - - return false -} - -glob.Glob = Glob -inherits(Glob, EE) -function Glob (pattern, options, cb) { - if (typeof options === 'function') { - cb = options - options = null - } - - if (options && options.sync) { - if (cb) - throw new TypeError('callback provided to sync glob') - return new GlobSync(pattern, options) - } - - if (!(this instanceof Glob)) - return new Glob(pattern, options, cb) - - setopts(this, pattern, options) - this._didRealPath = false - - // process each pattern in the minimatch set - var n = this.minimatch.set.length - - // The matches are stored as {: true,...} so 
that - // duplicates are automagically pruned. - // Later, we do an Object.keys() on these. - // Keep them as a list so we can fill in when nonull is set. - this.matches = new Array(n) - - if (typeof cb === 'function') { - cb = once(cb) - this.on('error', cb) - this.on('end', function (matches) { - cb(null, matches) - }) - } - - var self = this - this._processing = 0 - - this._emitQueue = [] - this._processQueue = [] - this.paused = false - - if (this.noprocess) - return this - - if (n === 0) - return done() - - var sync = true - for (var i = 0; i < n; i ++) { - this._process(this.minimatch.set[i], i, false, done) - } - sync = false - - function done () { - --self._processing - if (self._processing <= 0) { - if (sync) { - process.nextTick(function () { - self._finish() - }) - } else { - self._finish() - } - } - } -} - -Glob.prototype._finish = function () { - assert(this instanceof Glob) - if (this.aborted) - return - - if (this.realpath && !this._didRealpath) - return this._realpath() - - common.finish(this) - this.emit('end', this.found) -} - -Glob.prototype._realpath = function () { - if (this._didRealpath) - return - - this._didRealpath = true - - var n = this.matches.length - if (n === 0) - return this._finish() - - var self = this - for (var i = 0; i < this.matches.length; i++) - this._realpathSet(i, next) - - function next () { - if (--n === 0) - self._finish() - } -} - -Glob.prototype._realpathSet = function (index, cb) { - var matchset = this.matches[index] - if (!matchset) - return cb() - - var found = Object.keys(matchset) - var self = this - var n = found.length - - if (n === 0) - return cb() - - var set = this.matches[index] = Object.create(null) - found.forEach(function (p, i) { - // If there's a problem with the stat, then it means that - // one or more of the links in the realpath couldn't be - // resolved. just return the abs value in that case. - p = self._makeAbs(p) - rp.realpath(p, self.realpathCache, function (er, real) { - if (!er) - set[real] = true - else if (er.syscall === 'stat') - set[p] = true - else - self.emit('error', er) // srsly wtf right here - - if (--n === 0) { - self.matches[index] = set - cb() - } - }) - }) -} - -Glob.prototype._mark = function (p) { - return common.mark(this, p) -} - -Glob.prototype._makeAbs = function (f) { - return common.makeAbs(this, f) -} - -Glob.prototype.abort = function () { - this.aborted = true - this.emit('abort') -} - -Glob.prototype.pause = function () { - if (!this.paused) { - this.paused = true - this.emit('pause') - } -} - -Glob.prototype.resume = function () { - if (this.paused) { - this.emit('resume') - this.paused = false - if (this._emitQueue.length) { - var eq = this._emitQueue.slice(0) - this._emitQueue.length = 0 - for (var i = 0; i < eq.length; i ++) { - var e = eq[i] - this._emitMatch(e[0], e[1]) - } - } - if (this._processQueue.length) { - var pq = this._processQueue.slice(0) - this._processQueue.length = 0 - for (var i = 0; i < pq.length; i ++) { - var p = pq[i] - this._processing-- - this._process(p[0], p[1], p[2], p[3]) - } - } - } -} - -Glob.prototype._process = function (pattern, index, inGlobStar, cb) { - assert(this instanceof Glob) - assert(typeof cb === 'function') - - if (this.aborted) - return - - this._processing++ - if (this.paused) { - this._processQueue.push([pattern, index, inGlobStar, cb]) - return - } - - //console.error('PROCESS %d', this._processing, pattern) - - // Get the first [n] parts of pattern that are all strings. 
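// A compiled minimatch set entry mixes literal string segments with matcher
// objects, so the leading run of plain strings counted below is a literal
// path prefix that can be joined and read directly, without any matching.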
- var n = 0 - while (typeof pattern[n] === 'string') { - n ++ - } - // now n is the index of the first one that is *not* a string. - - // see if there's anything else - var prefix - switch (n) { - // if not, then this is rather simple - case pattern.length: - this._processSimple(pattern.join('/'), index, cb) - return - - case 0: - // pattern *starts* with some non-trivial item. - // going to readdir(cwd), but not include the prefix in matches. - prefix = null - break - - default: - // pattern has some string bits in the front. - // whatever it starts with, whether that's 'absolute' like /foo/bar, - // or 'relative' like '../baz' - prefix = pattern.slice(0, n).join('/') - break - } - - var remain = pattern.slice(n) - - // get the list of entries. - var read - if (prefix === null) - read = '.' - else if (isAbsolute(prefix) || - isAbsolute(pattern.map(function (p) { - return typeof p === 'string' ? p : '[*]' - }).join('/'))) { - if (!prefix || !isAbsolute(prefix)) - prefix = '/' + prefix - read = prefix - } else - read = prefix - - var abs = this._makeAbs(read) - - //if ignored, skip _processing - if (childrenIgnored(this, read)) - return cb() - - var isGlobStar = remain[0] === minimatch.GLOBSTAR - if (isGlobStar) - this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) - else - this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) -} - -Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { - var self = this - this._readdir(abs, inGlobStar, function (er, entries) { - return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) - }) -} - -Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { - - // if the abs isn't a dir, then nothing can match! - if (!entries) - return cb() - - // It will only match dot entries if it starts with a dot, or if - // dot is set. Stuff like @(.foo|.bar) isn't allowed. - var pn = remain[0] - var negate = !!this.minimatch.negate - var rawGlob = pn._glob - var dotOk = this.dot || rawGlob.charAt(0) === '.' - - var matchedEntries = [] - for (var i = 0; i < entries.length; i++) { - var e = entries[i] - if (e.charAt(0) !== '.' || dotOk) { - var m - if (negate && !prefix) { - m = !e.match(pn) - } else { - m = e.match(pn) - } - if (m) - matchedEntries.push(e) - } - } - - //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) - - var len = matchedEntries.length - // If there are no matched entries, then nothing matches. - if (len === 0) - return cb() - - // if this is the last remaining pattern bit, then no need for - // an additional stat *unless* the user has specified mark or - // stat explicitly. We know they exist, since readdir returned - // them. - - if (remain.length === 1 && !this.mark && !this.stat) { - if (!this.matches[index]) - this.matches[index] = Object.create(null) - - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - if (prefix) { - if (prefix !== '/') - e = prefix + '/' + e - else - e = prefix + e - } - - if (e.charAt(0) === '/' && !this.nomount) { - e = path.join(this.root, e) - } - this._emitMatch(index, e) - } - // This was the last one, and no stats were needed - return cb() - } - - // now test all matched entries as stand-ins for that part - // of the pattern. 
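// remain.shift() drops the pattern segment that just matched; each matched
// entry is then substituted for it as a literal and the shortened pattern
// is fed back through _process().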
- remain.shift() - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - var newPattern - if (prefix) { - if (prefix !== '/') - e = prefix + '/' + e - else - e = prefix + e - } - this._process([e].concat(remain), index, inGlobStar, cb) - } - cb() -} - -Glob.prototype._emitMatch = function (index, e) { - if (this.aborted) - return - - if (isIgnored(this, e)) - return - - if (this.paused) { - this._emitQueue.push([index, e]) - return - } - - var abs = isAbsolute(e) ? e : this._makeAbs(e) - - if (this.mark) - e = this._mark(e) - - if (this.absolute) - e = abs - - if (this.matches[index][e]) - return - - if (this.nodir) { - var c = this.cache[abs] - if (c === 'DIR' || Array.isArray(c)) - return - } - - this.matches[index][e] = true - - var st = this.statCache[abs] - if (st) - this.emit('stat', e, st) - - this.emit('match', e) -} - -Glob.prototype._readdirInGlobStar = function (abs, cb) { - if (this.aborted) - return - - // follow all symlinked directories forever - // just proceed as if this is a non-globstar situation - if (this.follow) - return this._readdir(abs, false, cb) - - var lstatkey = 'lstat\0' + abs - var self = this - var lstatcb = inflight(lstatkey, lstatcb_) - - if (lstatcb) - self.fs.lstat(abs, lstatcb) - - function lstatcb_ (er, lstat) { - if (er && er.code === 'ENOENT') - return cb() - - var isSym = lstat && lstat.isSymbolicLink() - self.symlinks[abs] = isSym - - // If it's not a symlink or a dir, then it's definitely a regular file. - // don't bother doing a readdir in that case. - if (!isSym && lstat && !lstat.isDirectory()) { - self.cache[abs] = 'FILE' - cb() - } else - self._readdir(abs, false, cb) - } -} - -Glob.prototype._readdir = function (abs, inGlobStar, cb) { - if (this.aborted) - return - - cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) - if (!cb) - return - - //console.error('RD %j %j', +inGlobStar, abs) - if (inGlobStar && !ownProp(this.symlinks, abs)) - return this._readdirInGlobStar(abs, cb) - - if (ownProp(this.cache, abs)) { - var c = this.cache[abs] - if (!c || c === 'FILE') - return cb() - - if (Array.isArray(c)) - return cb(null, c) - } - - var self = this - self.fs.readdir(abs, readdirCb(this, abs, cb)) -} - -function readdirCb (self, abs, cb) { - return function (er, entries) { - if (er) - self._readdirError(abs, er, cb) - else - self._readdirEntries(abs, entries, cb) - } -} - -Glob.prototype._readdirEntries = function (abs, entries, cb) { - if (this.aborted) - return - - // if we haven't asked to stat everything, then just - // assume that everything in there exists, so we can avoid - // having to stat it a second time. - if (!this.mark && !this.stat) { - for (var i = 0; i < entries.length; i ++) { - var e = entries[i] - if (abs === '/') - e = abs + e - else - e = abs + '/' + e - this.cache[e] = true - } - } - - this.cache[abs] = entries - return cb(null, entries) -} - -Glob.prototype._readdirError = function (f, er, cb) { - if (this.aborted) - return - - // handle errors, and cache the information - switch (er.code) { - case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 - case 'ENOTDIR': // totally normal. means it *does* exist. 
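// readdir() on a regular file fails with ENOTDIR, which proves the path
// exists as a non-directory, hence the 'FILE' cache entry below.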
- var abs = this._makeAbs(f) - this.cache[abs] = 'FILE' - if (abs === this.cwdAbs) { - var error = new Error(er.code + ' invalid cwd ' + this.cwd) - error.path = this.cwd - error.code = er.code - this.emit('error', error) - this.abort() - } - break - - case 'ENOENT': // not terribly unusual - case 'ELOOP': - case 'ENAMETOOLONG': - case 'UNKNOWN': - this.cache[this._makeAbs(f)] = false - break - - default: // some unusual error. Treat as failure. - this.cache[this._makeAbs(f)] = false - if (this.strict) { - this.emit('error', er) - // If the error is handled, then we abort - // if not, we threw out of here - this.abort() - } - if (!this.silent) - console.error('glob error', er) - break - } - - return cb() -} - -Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { - var self = this - this._readdir(abs, inGlobStar, function (er, entries) { - self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) - }) -} - - -Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { - //console.error('pgs2', prefix, remain[0], entries) - - // no entries means not a dir, so it can never have matches - // foo.txt/** doesn't match foo.txt - if (!entries) - return cb() - - // test without the globstar, and with every child both below - // and replacing the globstar. - var remainWithoutGlobStar = remain.slice(1) - var gspref = prefix ? [ prefix ] : [] - var noGlobStar = gspref.concat(remainWithoutGlobStar) - - // the noGlobStar pattern exits the inGlobStar state - this._process(noGlobStar, index, false, cb) - - var isSym = this.symlinks[abs] - var len = entries.length - - // If it's a symlink, and we're in a globstar, then stop - if (isSym && inGlobStar) - return cb() - - for (var i = 0; i < len; i++) { - var e = entries[i] - if (e.charAt(0) === '.' && !this.dot) - continue - - // these two cases enter the inGlobStar state - var instead = gspref.concat(entries[i], remainWithoutGlobStar) - this._process(instead, index, true, cb) - - var below = gspref.concat(entries[i], remain) - this._process(below, index, true, cb) - } - - cb() -} - -Glob.prototype._processSimple = function (prefix, index, cb) { - // XXX review this. Shouldn't it be doing the mounting etc - // before doing stat? kinda weird? 
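// As written, _stat() runs first and the mounting/normalization happens
// afterwards in _processSimple2(), which is what the XXX above questions.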
- var self = this - this._stat(prefix, function (er, exists) { - self._processSimple2(prefix, index, er, exists, cb) - }) -} -Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { - - //console.error('ps2', prefix, exists) - - if (!this.matches[index]) - this.matches[index] = Object.create(null) - - // If it doesn't exist, then just mark the lack of results - if (!exists) - return cb() - - if (prefix && isAbsolute(prefix) && !this.nomount) { - var trail = /[\/\\]$/.test(prefix) - if (prefix.charAt(0) === '/') { - prefix = path.join(this.root, prefix) - } else { - prefix = path.resolve(this.root, prefix) - if (trail) - prefix += '/' - } - } - - if (process.platform === 'win32') - prefix = prefix.replace(/\\/g, '/') - - // Mark this as a match - this._emitMatch(index, prefix) - cb() -} - -// Returns either 'DIR', 'FILE', or false -Glob.prototype._stat = function (f, cb) { - var abs = this._makeAbs(f) - var needDir = f.slice(-1) === '/' - - if (f.length > this.maxLength) - return cb() - - if (!this.stat && ownProp(this.cache, abs)) { - var c = this.cache[abs] - - if (Array.isArray(c)) - c = 'DIR' - - // It exists, but maybe not how we need it - if (!needDir || c === 'DIR') - return cb(null, c) - - if (needDir && c === 'FILE') - return cb() - - // otherwise we have to stat, because maybe c=true - // if we know it exists, but not what it is. - } - - var exists - var stat = this.statCache[abs] - if (stat !== undefined) { - if (stat === false) - return cb(null, stat) - else { - var type = stat.isDirectory() ? 'DIR' : 'FILE' - if (needDir && type === 'FILE') - return cb() - else - return cb(null, type, stat) - } - } - - var self = this - var statcb = inflight('stat\0' + abs, lstatcb_) - if (statcb) - self.fs.lstat(abs, statcb) - - function lstatcb_ (er, lstat) { - if (lstat && lstat.isSymbolicLink()) { - // If it's a symlink, then treat it as the target, unless - // the target does not exist, then treat it as a file. - return self.fs.stat(abs, function (er, stat) { - if (er) - self._stat2(f, abs, null, lstat, cb) - else - self._stat2(f, abs, er, stat, cb) - }) - } else { - self._stat2(f, abs, er, lstat, cb) - } - } -} - -Glob.prototype._stat2 = function (f, abs, er, stat, cb) { - if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { - this.statCache[abs] = false - return cb() - } - - var needDir = f.slice(-1) === '/' - this.statCache[abs] = stat - - if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) - return cb(null, false, stat) - - var c = true - if (stat) - c = stat.isDirectory() ? 'DIR' : 'FILE' - this.cache[abs] = this.cache[abs] || c - - if (needDir && c === 'FILE') - return cb() - - return cb(null, c, stat) -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json deleted file mode 100644 index ca0fd91..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json +++ /dev/null @@ -1,55 +0,0 @@ -{ - "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", - "name": "glob", - "description": "a little globber", - "version": "8.1.0", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/node-glob.git" - }, - "main": "glob.js", - "files": [ - "glob.js", - "sync.js", - "common.js" - ], - "engines": { - "node": ">=12" - }, - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^5.0.1", - "once": "^1.3.0" - }, - "devDependencies": { - "memfs": "^3.2.0", - "mkdirp": "0", - "rimraf": "^2.2.8", - "tap": "^16.0.1", - "tick": "0.0.6" - }, - "tap": { - "before": "test/00-setup.js", - "after": "test/zz-cleanup.js", - "statements": 90, - "branches": 90, - "functions": 90, - "lines": 90, - "jobs": 1 - }, - "scripts": { - "prepublish": "npm run benchclean", - "profclean": "rm -f v8.log profile.txt", - "test": "tap", - "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js", - "bench": "bash benchmark.sh", - "prof": "bash prof.sh && cat profile.txt", - "benchclean": "node benchclean.js" - }, - "license": "ISC", - "funding": { - "url": "https://github.com/sponsors/isaacs" - } -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/sync.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/sync.js deleted file mode 100644 index af4600d..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/glob/sync.js +++ /dev/null @@ -1,486 +0,0 @@ -module.exports = globSync -globSync.GlobSync = GlobSync - -var rp = require('fs.realpath') -var minimatch = require('minimatch') -var Minimatch = minimatch.Minimatch -var Glob = require('./glob.js').Glob -var util = require('util') -var path = require('path') -var assert = require('assert') -var isAbsolute = require('path').isAbsolute -var common = require('./common.js') -var setopts = common.setopts -var ownProp = common.ownProp -var childrenIgnored = common.childrenIgnored -var isIgnored = common.isIgnored - -function globSync (pattern, options) { - if (typeof options === 'function' || arguments.length === 3) - throw new TypeError('callback provided to sync glob\n'+ - 'See: https://github.com/isaacs/node-glob/issues/167') - - return new GlobSync(pattern, options).found -} - -function GlobSync (pattern, options) { - if (!pattern) - throw new Error('must provide pattern') - - if (typeof options === 'function' || arguments.length === 3) - throw new TypeError('callback provided to sync glob\n'+ - 'See: https://github.com/isaacs/node-glob/issues/167') - - if (!(this instanceof GlobSync)) - return new GlobSync(pattern, options) - - setopts(this, pattern, options) - - if (this.noprocess) - return this - - var n = this.minimatch.set.length - this.matches = new Array(n) - for (var i = 0; i < n; i ++) { - this._process(this.minimatch.set[i], i, false) - } - this._finish() -} - -GlobSync.prototype._finish = function () { - assert.ok(this instanceof GlobSync) - if (this.realpath) { - var self = this - this.matches.forEach(function (matchset, index) { - var set = self.matches[index] = Object.create(null) - for (var p in matchset) { - try { - p = self._makeAbs(p) - var real = rp.realpathSync(p, self.realpathCache) - set[real] = true - } catch (er) { - if (er.syscall === 'stat') - set[self._makeAbs(p)] = true - else - throw er - } - } - }) - } - common.finish(this) -} - - -GlobSync.prototype._process = function (pattern, index, inGlobStar) { - assert.ok(this instanceof GlobSync) - - // Get the first [n] parts of pattern that are 
all strings. - var n = 0 - while (typeof pattern[n] === 'string') { - n ++ - } - // now n is the index of the first one that is *not* a string. - - // See if there's anything else - var prefix - switch (n) { - // if not, then this is rather simple - case pattern.length: - this._processSimple(pattern.join('/'), index) - return - - case 0: - // pattern *starts* with some non-trivial item. - // going to readdir(cwd), but not include the prefix in matches. - prefix = null - break - - default: - // pattern has some string bits in the front. - // whatever it starts with, whether that's 'absolute' like /foo/bar, - // or 'relative' like '../baz' - prefix = pattern.slice(0, n).join('/') - break - } - - var remain = pattern.slice(n) - - // get the list of entries. - var read - if (prefix === null) - read = '.' - else if (isAbsolute(prefix) || - isAbsolute(pattern.map(function (p) { - return typeof p === 'string' ? p : '[*]' - }).join('/'))) { - if (!prefix || !isAbsolute(prefix)) - prefix = '/' + prefix - read = prefix - } else - read = prefix - - var abs = this._makeAbs(read) - - //if ignored, skip processing - if (childrenIgnored(this, read)) - return - - var isGlobStar = remain[0] === minimatch.GLOBSTAR - if (isGlobStar) - this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) - else - this._processReaddir(prefix, read, abs, remain, index, inGlobStar) -} - - -GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { - var entries = this._readdir(abs, inGlobStar) - - // if the abs isn't a dir, then nothing can match! - if (!entries) - return - - // It will only match dot entries if it starts with a dot, or if - // dot is set. Stuff like @(.foo|.bar) isn't allowed. - var pn = remain[0] - var negate = !!this.minimatch.negate - var rawGlob = pn._glob - var dotOk = this.dot || rawGlob.charAt(0) === '.' - - var matchedEntries = [] - for (var i = 0; i < entries.length; i++) { - var e = entries[i] - if (e.charAt(0) !== '.' || dotOk) { - var m - if (negate && !prefix) { - m = !e.match(pn) - } else { - m = e.match(pn) - } - if (m) - matchedEntries.push(e) - } - } - - var len = matchedEntries.length - // If there are no matched entries, then nothing matches. - if (len === 0) - return - - // if this is the last remaining pattern bit, then no need for - // an additional stat *unless* the user has specified mark or - // stat explicitly. We know they exist, since readdir returned - // them. - - if (remain.length === 1 && !this.mark && !this.stat) { - if (!this.matches[index]) - this.matches[index] = Object.create(null) - - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - if (prefix) { - if (prefix.slice(-1) !== '/') - e = prefix + '/' + e - else - e = prefix + e - } - - if (e.charAt(0) === '/' && !this.nomount) { - e = path.join(this.root, e) - } - this._emitMatch(index, e) - } - // This was the last one, and no stats were needed - return - } - - // now test all matched entries as stand-ins for that part - // of the pattern. 
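For illustration, a minimal standalone sketch of this stand-in step with hypothetical values (the names mirror the locals in the deleted function, but remain[0] is shown as a plain string rather than a compiled pattern object):

    // Each matched entry replaces the consumed magic part, and the
    // shortened remainder is re-processed for the next path segment.
    const matchedEntries = ['alpha', 'beta']  // hypothetical readdir results
    const remain = ['*', 'c']                 // magic part plus remainder
    remain.shift()                            // consume the magic part
    const standIns = matchedEntries.map(e => [e].concat(remain))
    console.log(standIns)                     // [ ['alpha','c'], ['beta','c'] ]
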
- remain.shift() - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - var newPattern - if (prefix) - newPattern = [prefix, e] - else - newPattern = [e] - this._process(newPattern.concat(remain), index, inGlobStar) - } -} - - -GlobSync.prototype._emitMatch = function (index, e) { - if (isIgnored(this, e)) - return - - var abs = this._makeAbs(e) - - if (this.mark) - e = this._mark(e) - - if (this.absolute) { - e = abs - } - - if (this.matches[index][e]) - return - - if (this.nodir) { - var c = this.cache[abs] - if (c === 'DIR' || Array.isArray(c)) - return - } - - this.matches[index][e] = true - - if (this.stat) - this._stat(e) -} - - -GlobSync.prototype._readdirInGlobStar = function (abs) { - // follow all symlinked directories forever - // just proceed as if this is a non-globstar situation - if (this.follow) - return this._readdir(abs, false) - - var entries - var lstat - var stat - try { - lstat = this.fs.lstatSync(abs) - } catch (er) { - if (er.code === 'ENOENT') { - // lstat failed, doesn't exist - return null - } - } - - var isSym = lstat && lstat.isSymbolicLink() - this.symlinks[abs] = isSym - - // If it's not a symlink or a dir, then it's definitely a regular file. - // don't bother doing a readdir in that case. - if (!isSym && lstat && !lstat.isDirectory()) - this.cache[abs] = 'FILE' - else - entries = this._readdir(abs, false) - - return entries -} - -GlobSync.prototype._readdir = function (abs, inGlobStar) { - var entries - - if (inGlobStar && !ownProp(this.symlinks, abs)) - return this._readdirInGlobStar(abs) - - if (ownProp(this.cache, abs)) { - var c = this.cache[abs] - if (!c || c === 'FILE') - return null - - if (Array.isArray(c)) - return c - } - - try { - return this._readdirEntries(abs, this.fs.readdirSync(abs)) - } catch (er) { - this._readdirError(abs, er) - return null - } -} - -GlobSync.prototype._readdirEntries = function (abs, entries) { - // if we haven't asked to stat everything, then just - // assume that everything in there exists, so we can avoid - // having to stat it a second time. - if (!this.mark && !this.stat) { - for (var i = 0; i < entries.length; i ++) { - var e = entries[i] - if (abs === '/') - e = abs + e - else - e = abs + '/' + e - this.cache[e] = true - } - } - - this.cache[abs] = entries - - // mark and cache dir-ness - return entries -} - -GlobSync.prototype._readdirError = function (f, er) { - // handle errors, and cache the information - switch (er.code) { - case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 - case 'ENOTDIR': // totally normal. means it *does* exist. - var abs = this._makeAbs(f) - this.cache[abs] = 'FILE' - if (abs === this.cwdAbs) { - var error = new Error(er.code + ' invalid cwd ' + this.cwd) - error.path = this.cwd - error.code = er.code - throw error - } - break - - case 'ENOENT': // not terribly unusual - case 'ELOOP': - case 'ENAMETOOLONG': - case 'UNKNOWN': - this.cache[this._makeAbs(f)] = false - break - - default: // some unusual error. Treat as failure. - this.cache[this._makeAbs(f)] = false - if (this.strict) - throw er - if (!this.silent) - console.error('glob error', er) - break - } -} - -GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { - - var entries = this._readdir(abs, inGlobStar) - - // no entries means not a dir, so it can never have matches - // foo.txt/** doesn't match foo.txt - if (!entries) - return - - // test without the globstar, and with every child both below - // and replacing the globstar. 
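For illustration, a self-contained sketch of this three-way expansion with hypothetical entries (GLOBSTAR stands in for the '**' token; prefix handling is simplified away):

    const GLOBSTAR = Symbol('globstar **')

    // One directory level of globstar processing: '**' can match zero
    // segments, be consumed by an entry, or remain active below an entry.
    function expandGlobStar (entries, remain) {
      const remainWithoutGlobStar = remain.slice(1)
      const patterns = [remainWithoutGlobStar]            // zero segments
      for (const e of entries) {
        if (e.charAt(0) === '.') continue                 // dot entries need opts.dot
        patterns.push([e].concat(remainWithoutGlobStar))  // '**' consumed by e
        patterns.push([e].concat(remain))                 // '**' still active below e
      }
      return patterns
    }

    // expandGlobStar(['a', 'b'], [GLOBSTAR, 'c']) yields patterns equivalent to:
    //   'c',  'a/c',  'a/**/c',  'b/c',  'b/**/c'
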
- var remainWithoutGlobStar = remain.slice(1) - var gspref = prefix ? [ prefix ] : [] - var noGlobStar = gspref.concat(remainWithoutGlobStar) - - // the noGlobStar pattern exits the inGlobStar state - this._process(noGlobStar, index, false) - - var len = entries.length - var isSym = this.symlinks[abs] - - // If it's a symlink, and we're in a globstar, then stop - if (isSym && inGlobStar) - return - - for (var i = 0; i < len; i++) { - var e = entries[i] - if (e.charAt(0) === '.' && !this.dot) - continue - - // these two cases enter the inGlobStar state - var instead = gspref.concat(entries[i], remainWithoutGlobStar) - this._process(instead, index, true) - - var below = gspref.concat(entries[i], remain) - this._process(below, index, true) - } -} - -GlobSync.prototype._processSimple = function (prefix, index) { - // XXX review this. Shouldn't it be doing the mounting etc - // before doing stat? kinda weird? - var exists = this._stat(prefix) - - if (!this.matches[index]) - this.matches[index] = Object.create(null) - - // If it doesn't exist, then just mark the lack of results - if (!exists) - return - - if (prefix && isAbsolute(prefix) && !this.nomount) { - var trail = /[\/\\]$/.test(prefix) - if (prefix.charAt(0) === '/') { - prefix = path.join(this.root, prefix) - } else { - prefix = path.resolve(this.root, prefix) - if (trail) - prefix += '/' - } - } - - if (process.platform === 'win32') - prefix = prefix.replace(/\\/g, '/') - - // Mark this as a match - this._emitMatch(index, prefix) -} - -// Returns either 'DIR', 'FILE', or false -GlobSync.prototype._stat = function (f) { - var abs = this._makeAbs(f) - var needDir = f.slice(-1) === '/' - - if (f.length > this.maxLength) - return false - - if (!this.stat && ownProp(this.cache, abs)) { - var c = this.cache[abs] - - if (Array.isArray(c)) - c = 'DIR' - - // It exists, but maybe not how we need it - if (!needDir || c === 'DIR') - return c - - if (needDir && c === 'FILE') - return false - - // otherwise we have to stat, because maybe c=true - // if we know it exists, but not what it is. - } - - var exists - var stat = this.statCache[abs] - if (!stat) { - var lstat - try { - lstat = this.fs.lstatSync(abs) - } catch (er) { - if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { - this.statCache[abs] = false - return false - } - } - - if (lstat && lstat.isSymbolicLink()) { - try { - stat = this.fs.statSync(abs) - } catch (er) { - stat = lstat - } - } else { - stat = lstat - } - } - - this.statCache[abs] = stat - - var c = true - if (stat) - c = stat.isDirectory() ? 'DIR' : 'FILE' - - this.cache[abs] = this.cache[abs] || c - - if (needDir && c === 'FILE') - return false - - return c -} - -GlobSync.prototype._mark = function (p) { - return common.mark(this, p) -} - -GlobSync.prototype._makeAbs = function (f) { - return common.makeAbs(this, f) -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE deleted file mode 100644 index 1493534..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. 
- -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/lib/path.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/lib/path.js deleted file mode 100644 index ffe453d..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/lib/path.js +++ /dev/null @@ -1,4 +0,0 @@ -const isWindows = typeof process === 'object' && - process && - process.platform === 'win32' -module.exports = isWindows ? { sep: '\\' } : { sep: '/' } diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/minimatch.js b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/minimatch.js deleted file mode 100644 index 6c8bfc3..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/minimatch.js +++ /dev/null @@ -1,944 +0,0 @@ -const minimatch = module.exports = (p, pattern, options = {}) => { - assertValidPattern(pattern) - - // shortcut: comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - return false - } - - return new Minimatch(pattern, options).match(p) -} - -module.exports = minimatch - -const path = require('./lib/path.js') -minimatch.sep = path.sep - -const GLOBSTAR = Symbol('globstar **') -minimatch.GLOBSTAR = GLOBSTAR -const expand = require('brace-expansion') - -const plTypes = { - '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, - '?': { open: '(?:', close: ')?' }, - '+': { open: '(?:', close: ')+' }, - '*': { open: '(?:', close: ')*' }, - '@': { open: '(?:', close: ')' } -} - -// any single thing other than / -// don't need to escape / when using new RegExp() -const qmark = '[^/]' - -// * => any number of characters -const star = qmark + '*?' - -// ** when dots are allowed. Anything goes, except .. and . -// not (^ or / followed by one or two dots followed by $ or /), -// followed by anything, any number of times. -const twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' - -// not a ^ or / followed by a dot, -// followed by anything, any number of times. -const twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' - -// "abc" -> { a:true, b:true, c:true } -const charSet = s => s.split('').reduce((set, c) => { - set[c] = true - return set -}, {}) - -// characters that need to be escaped in RegExp. -const reSpecials = charSet('().*{}+?[]^$\\!') - -// characters that indicate we have to add the pattern start -const addPatternStartSet = charSet('[.(') - -// normalizes slashes. 
-const slashSplit = /\/+/
-
-minimatch.filter = (pattern, options = {}) =>
-  (p, i, list) => minimatch(p, pattern, options)
-
-const ext = (a, b = {}) => {
-  const t = {}
-  Object.keys(a).forEach(k => t[k] = a[k])
-  Object.keys(b).forEach(k => t[k] = b[k])
-  return t
-}
-
-minimatch.defaults = def => {
-  if (!def || typeof def !== 'object' || !Object.keys(def).length) {
-    return minimatch
-  }
-
-  const orig = minimatch
-
-  const m = (p, pattern, options) => orig(p, pattern, ext(def, options))
-  m.Minimatch = class Minimatch extends orig.Minimatch {
-    constructor (pattern, options) {
-      super(pattern, ext(def, options))
-    }
-  }
-  m.Minimatch.defaults = options => orig.defaults(ext(def, options)).Minimatch
-  m.filter = (pattern, options) => orig.filter(pattern, ext(def, options))
-  m.defaults = options => orig.defaults(ext(def, options))
-  m.makeRe = (pattern, options) => orig.makeRe(pattern, ext(def, options))
-  m.braceExpand = (pattern, options) => orig.braceExpand(pattern, ext(def, options))
-  m.match = (list, pattern, options) => orig.match(list, pattern, ext(def, options))
-
-  return m
-}
-
-
-
-
-// Brace expansion:
-// a{b,c}d -> abd acd
-// a{b,}c -> abc ac
-// a{0..3}d -> a0d a1d a2d a3d
-// a{b,c{d,e}f}g -> abg acdfg acefg
-// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
-//
-// Invalid sets are not expanded.
-// a{2..}b -> a{2..}b
-// a{b}c -> a{b}c
-minimatch.braceExpand = (pattern, options) => braceExpand(pattern, options)
-
-const braceExpand = (pattern, options = {}) => {
-  assertValidPattern(pattern)
-
-  // Thanks to Yeting Li <https://github.com/yetingli> for
-  // improving this regexp to avoid a ReDOS vulnerability.
-  if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
-    // shortcut. no need to expand.
-    return [pattern]
-  }
-
-  return expand(pattern)
-}
-
-const MAX_PATTERN_LENGTH = 1024 * 64
-const assertValidPattern = pattern => {
-  if (typeof pattern !== 'string') {
-    throw new TypeError('invalid pattern')
-  }
-
-  if (pattern.length > MAX_PATTERN_LENGTH) {
-    throw new TypeError('pattern is too long')
-  }
-}
-
-// parse a component of the expanded set.
-// At this point, no pattern may contain "/" in it
-// so we're going to return a 2d array, where each entry is the full
-// pattern, split on '/', and then turned into a regular expression.
-// A regexp is made at the end which joins each array with an
-// escaped /, and another full one which joins each regexp with |.
-//
-// Following the lead of Bash 4.1, note that "**" only has special meaning
-// when it is the *only* thing in a path portion. Otherwise, any series
-// of * is equivalent to a single *. Globstar behavior is enabled by
-// default, and can be disabled by setting options.noglobstar.
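A short usage sketch of the expansion rules documented above (assuming this file is loaded as the minimatch package; the expected outputs are in the trailing comments):

    const minimatch = require('minimatch')

    console.log(minimatch.braceExpand('a{b,c}d'))   // [ 'abd', 'acd' ]
    console.log(minimatch.braceExpand('a{0..3}d'))  // [ 'a0d', 'a1d', 'a2d', 'a3d' ]
    console.log(minimatch.braceExpand('a{2..}b'))   // [ 'a{2..}b' ]  invalid set, unchanged
    console.log(minimatch.braceExpand('a{b}c'))     // [ 'a{b}c' ]    invalid set, unchanged

    // '**' is only special as a whole path portion; 'x**' behaves like 'x*'
    console.log(minimatch('a/x/y/b', 'a/**/b'))     // true
    console.log(minimatch('a/x/y/b', 'a/x**/b'))    // false
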
-const SUBPARSE = Symbol('subparse') - -minimatch.makeRe = (pattern, options) => - new Minimatch(pattern, options || {}).makeRe() - -minimatch.match = (list, pattern, options = {}) => { - const mm = new Minimatch(pattern, options) - list = list.filter(f => mm.match(f)) - if (mm.options.nonull && !list.length) { - list.push(pattern) - } - return list -} - -// replace stuff like \* with * -const globUnescape = s => s.replace(/\\(.)/g, '$1') -const charUnescape = s => s.replace(/\\([^-\]])/g, '$1') -const regExpEscape = s => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') -const braExpEscape = s => s.replace(/[[\]\\]/g, '\\$&') - -class Minimatch { - constructor (pattern, options) { - assertValidPattern(pattern) - - if (!options) options = {} - - this.options = options - this.set = [] - this.pattern = pattern - this.windowsPathsNoEscape = !!options.windowsPathsNoEscape || - options.allowWindowsEscape === false - if (this.windowsPathsNoEscape) { - this.pattern = this.pattern.replace(/\\/g, '/') - } - this.regexp = null - this.negate = false - this.comment = false - this.empty = false - this.partial = !!options.partial - - // make the set of regexps etc. - this.make() - } - - debug () {} - - make () { - const pattern = this.pattern - const options = this.options - - // empty patterns and comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - this.comment = true - return - } - if (!pattern) { - this.empty = true - return - } - - // step 1: figure out negation, etc. - this.parseNegate() - - // step 2: expand braces - let set = this.globSet = this.braceExpand() - - if (options.debug) this.debug = (...args) => console.error(...args) - - this.debug(this.pattern, set) - - // step 3: now we have a set, so turn each one into a series of path-portion - // matching patterns. - // These will be regexps, except in the case of "**", which is - // set to the GLOBSTAR object for globstar behavior, - // and will not contain any / characters - set = this.globParts = set.map(s => s.split(slashSplit)) - - this.debug(this.pattern, set) - - // glob --> regexps - set = set.map((s, si, set) => s.map(this.parse, this)) - - this.debug(this.pattern, set) - - // filter out everything that didn't compile properly. - set = set.filter(s => s.indexOf(false) === -1) - - this.debug(this.pattern, set) - - this.set = set - } - - parseNegate () { - if (this.options.nonegate) return - - const pattern = this.pattern - let negate = false - let negateOffset = 0 - - for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) { - negate = !negate - negateOffset++ - } - - if (negateOffset) this.pattern = pattern.slice(negateOffset) - this.negate = negate - } - - // set partial to true to test if, for example, - // "/a/b" matches the start of "/*/b/*/d" - // Partial means, if you run out of file before you run - // out of pattern, then that's fine, as long as all - // the parts match. - matchOne (file, pattern, partial) { - var options = this.options - - this.debug('matchOne', - { 'this': this, file: file, pattern: pattern }) - - this.debug('matchOne', file.length, pattern.length) - - for (var fi = 0, - pi = 0, - fl = file.length, - pl = pattern.length - ; (fi < fl) && (pi < pl) - ; fi++, pi++) { - this.debug('matchOne loop') - var p = pattern[pi] - var f = file[fi] - - this.debug(pattern, p, f) - - // should be impossible. - // some invalid regexp stuff in the set. 
- /* istanbul ignore if */ - if (p === false) return false - - if (p === GLOBSTAR) { - this.debug('GLOBSTAR', [pattern, p, f]) - - // "**" - // a/**/b/**/c would match the following: - // a/b/x/y/z/c - // a/x/y/z/b/c - // a/b/x/b/x/c - // a/b/c - // To do this, take the rest of the pattern after - // the **, and see if it would match the file remainder. - // If so, return success. - // If not, the ** "swallows" a segment, and try again. - // This is recursively awful. - // - // a/**/b/**/c matching a/b/x/y/z/c - // - a matches a - // - doublestar - // - matchOne(b/x/y/z/c, b/**/c) - // - b matches b - // - doublestar - // - matchOne(x/y/z/c, c) -> no - // - matchOne(y/z/c, c) -> no - // - matchOne(z/c, c) -> no - // - matchOne(c, c) yes, hit - var fr = fi - var pr = pi + 1 - if (pr === pl) { - this.debug('** at the end') - // a ** at the end will just swallow the rest. - // We have found a match. - // however, it will not swallow /.x, unless - // options.dot is set. - // . and .. are *never* matched by **, for explosively - // exponential reasons. - for (; fi < fl; fi++) { - if (file[fi] === '.' || file[fi] === '..' || - (!options.dot && file[fi].charAt(0) === '.')) return false - } - return true - } - - // ok, let's see if we can swallow whatever we can. - while (fr < fl) { - var swallowee = file[fr] - - this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) - - // XXX remove this slice. Just pass the start index. - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug('globstar found match!', fr, fl, swallowee) - // found a match. - return true - } else { - // can't swallow "." or ".." ever. - // can only swallow ".foo" when explicitly asked. - if (swallowee === '.' || swallowee === '..' || - (!options.dot && swallowee.charAt(0) === '.')) { - this.debug('dot detected!', file, fr, pattern, pr) - break - } - - // ** swallows a segment, and continue. - this.debug('globstar swallow a segment, and continue') - fr++ - } - } - - // no match was found. - // However, in partial mode, we can't say this is necessarily over. - // If there's more *pattern* left, then - /* istanbul ignore if */ - if (partial) { - // ran out of file - this.debug('\n>>> no match, partial?', file, fr, pattern, pr) - if (fr === fl) return true - } - return false - } - - // something other than ** - // non-magic patterns just have to match exactly - // patterns with magic have been turned into regexps. - var hit - if (typeof p === 'string') { - hit = f === p - this.debug('string match', p, f, hit) - } else { - hit = f.match(p) - this.debug('pattern match', p, f, hit) - } - - if (!hit) return false - } - - // Note: ending in / means that we'll get a final "" - // at the end of the pattern. This can only match a - // corresponding "" at the end of the file. - // If the file ends in /, then it can only match a - // a pattern that ends in /, unless the pattern just - // doesn't have any more for it. But, a/b/ should *not* - // match "a/b/*", even though "" matches against the - // [^/]*? pattern, except in partial mode, where it might - // simply not be reached yet. - // However, a/b/ should still satisfy a/* - - // now either we fell off the end of the pattern, or we're done. - if (fi === fl && pi === pl) { - // ran out of pattern and filename at the same time. - // an exact hit! - return true - } else if (fi === fl) { - // ran out of file, but still had pattern left. - // this is ok if we're doing the match as part of - // a glob fs traversal. 
- return partial - } else /* istanbul ignore else */ if (pi === pl) { - // ran out of pattern, still have file left. - // this is only acceptable if we're on the very last - // empty segment of a file with a trailing slash. - // a/* should match a/b/ - return (fi === fl - 1) && (file[fi] === '') - } - - // should be unreachable. - /* istanbul ignore next */ - throw new Error('wtf?') - } - - braceExpand () { - return braceExpand(this.pattern, this.options) - } - - parse (pattern, isSub) { - assertValidPattern(pattern) - - const options = this.options - - // shortcuts - if (pattern === '**') { - if (!options.noglobstar) - return GLOBSTAR - else - pattern = '*' - } - if (pattern === '') return '' - - let re = '' - let hasMagic = false - let escaping = false - // ? => one single character - const patternListStack = [] - const negativeLists = [] - let stateChar - let inClass = false - let reClassStart = -1 - let classStart = -1 - let cs - let pl - let sp - // . and .. never match anything that doesn't start with ., - // even when options.dot is set. However, if the pattern - // starts with ., then traversal patterns can match. - let dotTravAllowed = pattern.charAt(0) === '.' - let dotFileAllowed = options.dot || dotTravAllowed - const patternStart = () => - dotTravAllowed - ? '' - : dotFileAllowed - ? '(?!(?:^|\\/)\\.{1,2}(?:$|\\/))' - : '(?!\\.)' - const subPatternStart = (p) => - p.charAt(0) === '.' - ? '' - : options.dot - ? '(?!(?:^|\\/)\\.{1,2}(?:$|\\/))' - : '(?!\\.)' - - - const clearStateChar = () => { - if (stateChar) { - // we had some state-tracking character - // that wasn't consumed by this pass. - switch (stateChar) { - case '*': - re += star - hasMagic = true - break - case '?': - re += qmark - hasMagic = true - break - default: - re += '\\' + stateChar - break - } - this.debug('clearStateChar %j %j', stateChar, re) - stateChar = false - } - } - - for (let i = 0, c; (i < pattern.length) && (c = pattern.charAt(i)); i++) { - this.debug('%s\t%s %s %j', pattern, i, re, c) - - // skip over any that are escaped. - if (escaping) { - /* istanbul ignore next - completely not allowed, even escaped. */ - if (c === '/') { - return false - } - - if (reSpecials[c]) { - re += '\\' - } - re += c - escaping = false - continue - } - - switch (c) { - /* istanbul ignore next */ - case '/': { - // Should already be path-split by now. - return false - } - - case '\\': - if (inClass && pattern.charAt(i + 1) === '-') { - re += c - continue - } - - clearStateChar() - escaping = true - continue - - // the various stateChar values - // for the "extglob" stuff. - case '?': - case '*': - case '+': - case '@': - case '!': - this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) - - // all of those are literals inside a class, except that - // the glob [!a] means [^a] in regexp - if (inClass) { - this.debug(' in class') - if (c === '!' && i === classStart + 1) c = '^' - re += c - continue - } - - // if we already have a stateChar, then it means - // that there was something like ** or +? in there. - // Handle the stateChar, then proceed with this one. - this.debug('call clearStateChar %j', stateChar) - clearStateChar() - stateChar = c - // if extglob is disabled, then +(asdf|foo) isn't a thing. - // just clear the statechar *now*, rather than even diving into - // the patternList stuff. 
- if (options.noext) clearStateChar() - continue - - case '(': { - if (inClass) { - re += '(' - continue - } - - if (!stateChar) { - re += '\\(' - continue - } - - const plEntry = { - type: stateChar, - start: i - 1, - reStart: re.length, - open: plTypes[stateChar].open, - close: plTypes[stateChar].close, - } - this.debug(this.pattern, '\t', plEntry) - patternListStack.push(plEntry) - // negation is (?:(?!(?:js)(?:))[^/]*) - re += plEntry.open - // next entry starts with a dot maybe? - if (plEntry.start === 0 && plEntry.type !== '!') { - dotTravAllowed = true - re += subPatternStart(pattern.slice(i + 1)) - } - this.debug('plType %j %j', stateChar, re) - stateChar = false - continue - } - - case ')': { - const plEntry = patternListStack[patternListStack.length - 1] - if (inClass || !plEntry) { - re += '\\)' - continue - } - patternListStack.pop() - - // closing an extglob - clearStateChar() - hasMagic = true - pl = plEntry - // negation is (?:(?!js)[^/]*) - // The others are (?:) - re += pl.close - if (pl.type === '!') { - negativeLists.push(Object.assign(pl, { reEnd: re.length })) - } - continue - } - - case '|': { - const plEntry = patternListStack[patternListStack.length - 1] - if (inClass || !plEntry) { - re += '\\|' - continue - } - - clearStateChar() - re += '|' - // next subpattern can start with a dot? - if (plEntry.start === 0 && plEntry.type !== '!') { - dotTravAllowed = true - re += subPatternStart(pattern.slice(i + 1)) - } - continue - } - - // these are mostly the same in regexp and glob - case '[': - // swallow any state-tracking char before the [ - clearStateChar() - - if (inClass) { - re += '\\' + c - continue - } - - inClass = true - classStart = i - reClassStart = re.length - re += c - continue - - case ']': - // a right bracket shall lose its special - // meaning and represent itself in - // a bracket expression if it occurs - // first in the list. -- POSIX.2 2.8.3.2 - if (i === classStart + 1 || !inClass) { - re += '\\' + c - continue - } - - // split where the last [ was, make sure we don't have - // an invalid re. if so, re-walk the contents of the - // would-be class to re-translate any characters that - // were passed through as-is - // TODO: It would probably be faster to determine this - // without a try/catch and a new RegExp, but it's tricky - // to do safely. For now, this is safe and works. - cs = pattern.substring(classStart + 1, i) - try { - RegExp('[' + braExpEscape(charUnescape(cs)) + ']') - // looks good, finish up the class. - re += c - } catch (er) { - // out of order ranges in JS are errors, but in glob syntax, - // they're just a range that matches nothing. - re = re.substring(0, reClassStart) + '(?:$.)' // match nothing ever - } - hasMagic = true - inClass = false - continue - - default: - // swallow any state char that wasn't consumed - clearStateChar() - - if (reSpecials[c] && !(c === '^' && inClass)) { - re += '\\' - } - - re += c - break - - } // switch - } // for - - // handle the case where we left a class open. - // "[abc" is valid, equivalent to "\[abc" - if (inClass) { - // split where the last [ was, and escape it - // this is a huge pita. We now have to re-walk - // the contents of the would-be class to re-translate - // any characters that were passed through as-is - cs = pattern.slice(classStart + 1) - sp = this.parse(cs, SUBPARSE) - re = re.substring(0, reClassStart) + '\\[' + sp[0] - hasMagic = hasMagic || sp[1] - } - - // handle the case where we had a +( thing at the *end* - // of the pattern. 
- // each pattern list stack adds 3 chars, and we need to go through - // and escape any | chars that were passed through as-is for the regexp. - // Go through and escape them, taking care not to double-escape any - // | chars that were already escaped. - for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { - let tail - tail = re.slice(pl.reStart + pl.open.length) - this.debug('setting tail', re, pl) - // maybe some even number of \, then maybe 1 \, followed by a | - tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, (_, $1, $2) => { - /* istanbul ignore else - should already be done */ - if (!$2) { - // the | isn't already escaped, so escape it. - $2 = '\\' - } - - // need to escape all those slashes *again*, without escaping the - // one that we need for escaping the | character. As it works out, - // escaping an even number of slashes can be done by simply repeating - // it exactly after itself. That's why this trick works. - // - // I am sorry that you have to see this. - return $1 + $1 + $2 + '|' - }) - - this.debug('tail=%j\n %s', tail, tail, pl, re) - const t = pl.type === '*' ? star - : pl.type === '?' ? qmark - : '\\' + pl.type - - hasMagic = true - re = re.slice(0, pl.reStart) + t + '\\(' + tail - } - - // handle trailing things that only matter at the very end. - clearStateChar() - if (escaping) { - // trailing \\ - re += '\\\\' - } - - // only need to apply the nodot start if the re starts with - // something that could conceivably capture a dot - const addPatternStart = addPatternStartSet[re.charAt(0)] - - // Hack to work around lack of negative lookbehind in JS - // A pattern like: *.!(x).!(y|z) needs to ensure that a name - // like 'a.xyz.yz' doesn't match. So, the first negative - // lookahead, has to look ALL the way ahead, to the end of - // the pattern. - for (let n = negativeLists.length - 1; n > -1; n--) { - const nl = negativeLists[n] - - const nlBefore = re.slice(0, nl.reStart) - const nlFirst = re.slice(nl.reStart, nl.reEnd - 8) - let nlAfter = re.slice(nl.reEnd) - const nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + nlAfter - - // Handle nested stuff like *(*.js|!(*.json)), where open parens - // mean that we should *not* include the ) in the bit that is considered - // "after" the negated section. - const closeParensBefore = nlBefore.split(')').length - const openParensBefore = nlBefore.split('(').length - closeParensBefore - let cleanAfter = nlAfter - for (let i = 0; i < openParensBefore; i++) { - cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') - } - nlAfter = cleanAfter - - const dollar = nlAfter === '' && isSub !== SUBPARSE ? '(?:$|\\/)' : '' - - re = nlBefore + nlFirst + nlAfter + dollar + nlLast - } - - // if the re is not "" at this point, then we need to make sure - // it doesn't match against an empty path part. - // Otherwise a/* will match a/, which it should not. - if (re !== '' && hasMagic) { - re = '(?=.)' + re - } - - if (addPatternStart) { - re = patternStart() + re - } - - // parsing just a piece of a larger pattern. - if (isSub === SUBPARSE) { - return [re, hasMagic] - } - - // if it's nocase, and the lcase/uppercase don't match, it's magic - if (options.nocase && !hasMagic) { - hasMagic = pattern.toUpperCase() !== pattern.toLowerCase() - } - - // skip the regexp for non-magical patterns - // unescape anything in it, though, so that it'll be - // an exact match against a file etc. - if (!hasMagic) { - return globUnescape(pattern) - } - - const flags = options.nocase ? 
'i' : '' - try { - return Object.assign(new RegExp('^' + re + '$', flags), { - _glob: pattern, - _src: re, - }) - } catch (er) /* istanbul ignore next - should be impossible */ { - // If it was an invalid regular expression, then it can't match - // anything. This trick looks for a character after the end of - // the string, which is of course impossible, except in multi-line - // mode, but it's not a /m regex. - return new RegExp('$.') - } - } - - makeRe () { - if (this.regexp || this.regexp === false) return this.regexp - - // at this point, this.set is a 2d array of partial - // pattern strings, or "**". - // - // It's better to use .match(). This function shouldn't - // be used, really, but it's pretty convenient sometimes, - // when you just want to work with a regex. - const set = this.set - - if (!set.length) { - this.regexp = false - return this.regexp - } - const options = this.options - - const twoStar = options.noglobstar ? star - : options.dot ? twoStarDot - : twoStarNoDot - const flags = options.nocase ? 'i' : '' - - // coalesce globstars and regexpify non-globstar patterns - // if it's the only item, then we just do one twoStar - // if it's the first, and there are more, prepend (\/|twoStar\/)? to next - // if it's the last, append (\/twoStar|) to previous - // if it's in the middle, append (\/|\/twoStar\/) to previous - // then filter out GLOBSTAR symbols - let re = set.map(pattern => { - pattern = pattern.map(p => - typeof p === 'string' ? regExpEscape(p) - : p === GLOBSTAR ? GLOBSTAR - : p._src - ).reduce((set, p) => { - if (!(set[set.length - 1] === GLOBSTAR && p === GLOBSTAR)) { - set.push(p) - } - return set - }, []) - pattern.forEach((p, i) => { - if (p !== GLOBSTAR || pattern[i-1] === GLOBSTAR) { - return - } - if (i === 0) { - if (pattern.length > 1) { - pattern[i+1] = '(?:\\\/|' + twoStar + '\\\/)?' + pattern[i+1] - } else { - pattern[i] = twoStar - } - } else if (i === pattern.length - 1) { - pattern[i-1] += '(?:\\\/|' + twoStar + ')?' - } else { - pattern[i-1] += '(?:\\\/|\\\/' + twoStar + '\\\/)' + pattern[i+1] - pattern[i+1] = GLOBSTAR - } - }) - return pattern.filter(p => p !== GLOBSTAR).join('/') - }).join('|') - - // must match entire pattern - // ending in a * or ** will make it less strict. - re = '^(?:' + re + ')$' - - // can match anything, as long as it's not this. - if (this.negate) re = '^(?!' + re + ').*$' - - try { - this.regexp = new RegExp(re, flags) - } catch (ex) /* istanbul ignore next - should be impossible */ { - this.regexp = false - } - return this.regexp - } - - match (f, partial = this.partial) { - this.debug('match', f, this.pattern) - // short-circuit in the case of busted things. - // comments, etc. - if (this.comment) return false - if (this.empty) return f === '' - - if (f === '/' && partial) return true - - const options = this.options - - // windows: need to use /, not \ - if (path.sep !== '/') { - f = f.split(path.sep).join('/') - } - - // treat the test path as a set of pathparts. - f = f.split(slashSplit) - this.debug(this.pattern, 'split', f) - - // just ONE of the pattern sets in this.set needs to match - // in order for it to be valid. If negating, then just one - // match means that we have failed. - // Either way, return on the first hit. 
- - const set = this.set - this.debug(this.pattern, 'set', set) - - // Find the basename of the path by looking for the last non-empty segment - let filename - for (let i = f.length - 1; i >= 0; i--) { - filename = f[i] - if (filename) break - } - - for (let i = 0; i < set.length; i++) { - const pattern = set[i] - let file = f - if (options.matchBase && pattern.length === 1) { - file = [filename] - } - const hit = this.matchOne(file, pattern, partial) - if (hit) { - if (options.flipNegate) return true - return !this.negate - } - } - - // didn't get any hits. this is success if it's a negative - // pattern, failure otherwise. - if (options.flipNegate) return false - return this.negate - } - - static defaults (def) { - return minimatch.defaults(def).Minimatch - } -} - -minimatch.Minimatch = Minimatch diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json deleted file mode 100644 index c8809db..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "author": "Isaac Z. Schlueter (http://blog.izs.me)", - "name": "minimatch", - "description": "a glob matcher in javascript", - "publishConfig": { - "tag": "legacy-v5" - }, - "version": "5.1.6", - "repository": { - "type": "git", - "url": "git://github.com/isaacs/minimatch.git" - }, - "main": "minimatch.js", - "scripts": { - "test": "tap", - "snap": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags" - }, - "engines": { - "node": ">=10" - }, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "devDependencies": { - "tap": "^16.3.2" - }, - "license": "ISC", - "files": [ - "minimatch.js", - "lib" - ] -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/package.json b/node_modules/npm/node_modules/node-gyp/node_modules/cacache/package.json deleted file mode 100644 index 7dbd407..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/cacache/package.json +++ /dev/null @@ -1,84 +0,0 @@ -{ - "name": "cacache", - "version": "16.1.3", - "cache-version": { - "content": "2", - "index": "5" - }, - "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "test": "tap", - "snap": "tap", - "coverage": "tap", - "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", - "lint": "eslint \"**/*.js\"", - "npmclilint": "npmcli-lint", - "lintfix": "npm run lint -- --fix", - "postsnap": "npm run lintfix --", - "postlint": "template-oss-check", - "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/cacache.git" - }, - "keywords": [ - "cache", - "caching", - "content-addressable", - "sri", - "sri hash", - "subresource integrity", - "cache", - "storage", - "store", - "file store", - "filesystem", - "disk cache", - "disk storage" - ], - "license": "ISC", - "dependencies": { - "@npmcli/fs": "^2.1.0", - "@npmcli/move-file": "^2.0.0", - "chownr": "^2.0.0", - "fs-minipass": "^2.1.0", - "glob": "^8.0.1", - "infer-owner": "^1.0.4", - "lru-cache": "^7.7.1", 
- "minipass": "^3.1.6", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "mkdirp": "^1.0.4", - "p-map": "^4.0.0", - "promise-inflight": "^1.0.1", - "rimraf": "^3.0.2", - "ssri": "^9.0.0", - "tar": "^6.1.11", - "unique-filename": "^2.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.5.0", - "tap": "^16.0.0" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "windowsCI": false, - "version": "3.5.0" - }, - "author": "GitHub Inc." -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/fs-minipass/LICENSE b/node_modules/npm/node_modules/node-gyp/node_modules/fs-minipass/LICENSE deleted file mode 100644 index 19129e3..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/fs-minipass/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) Isaac Z. Schlueter and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/fs-minipass/index.js b/node_modules/npm/node_modules/node-gyp/node_modules/fs-minipass/index.js deleted file mode 100644 index 9b0779c..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/fs-minipass/index.js +++ /dev/null @@ -1,422 +0,0 @@ -'use strict' -const MiniPass = require('minipass') -const EE = require('events').EventEmitter -const fs = require('fs') - -let writev = fs.writev -/* istanbul ignore next */ -if (!writev) { - // This entire block can be removed if support for earlier than Node.js - // 12.9.0 is not needed. 
- const binding = process.binding('fs') - const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback - - writev = (fd, iovec, pos, cb) => { - const done = (er, bw) => cb(er, bw, iovec) - const req = new FSReqWrap() - req.oncomplete = done - binding.writeBuffers(fd, iovec, pos, req) - } -} - -const _autoClose = Symbol('_autoClose') -const _close = Symbol('_close') -const _ended = Symbol('_ended') -const _fd = Symbol('_fd') -const _finished = Symbol('_finished') -const _flags = Symbol('_flags') -const _flush = Symbol('_flush') -const _handleChunk = Symbol('_handleChunk') -const _makeBuf = Symbol('_makeBuf') -const _mode = Symbol('_mode') -const _needDrain = Symbol('_needDrain') -const _onerror = Symbol('_onerror') -const _onopen = Symbol('_onopen') -const _onread = Symbol('_onread') -const _onwrite = Symbol('_onwrite') -const _open = Symbol('_open') -const _path = Symbol('_path') -const _pos = Symbol('_pos') -const _queue = Symbol('_queue') -const _read = Symbol('_read') -const _readSize = Symbol('_readSize') -const _reading = Symbol('_reading') -const _remain = Symbol('_remain') -const _size = Symbol('_size') -const _write = Symbol('_write') -const _writing = Symbol('_writing') -const _defaultFlag = Symbol('_defaultFlag') -const _errored = Symbol('_errored') - -class ReadStream extends MiniPass { - constructor (path, opt) { - opt = opt || {} - super(opt) - - this.readable = true - this.writable = false - - if (typeof path !== 'string') - throw new TypeError('path must be a string') - - this[_errored] = false - this[_fd] = typeof opt.fd === 'number' ? opt.fd : null - this[_path] = path - this[_readSize] = opt.readSize || 16*1024*1024 - this[_reading] = false - this[_size] = typeof opt.size === 'number' ? opt.size : Infinity - this[_remain] = this[_size] - this[_autoClose] = typeof opt.autoClose === 'boolean' ? - opt.autoClose : true - - if (typeof this[_fd] === 'number') - this[_read]() - else - this[_open]() - } - - get fd () { return this[_fd] } - get path () { return this[_path] } - - write () { - throw new TypeError('this is a readable stream') - } - - end () { - throw new TypeError('this is a readable stream') - } - - [_open] () { - fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd)) - } - - [_onopen] (er, fd) { - if (er) - this[_onerror](er) - else { - this[_fd] = fd - this.emit('open', fd) - this[_read]() - } - } - - [_makeBuf] () { - return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain])) - } - - [_read] () { - if (!this[_reading]) { - this[_reading] = true - const buf = this[_makeBuf]() - /* istanbul ignore if */ - if (buf.length === 0) - return process.nextTick(() => this[_onread](null, 0, buf)) - fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) => - this[_onread](er, br, buf)) - } - } - - [_onread] (er, br, buf) { - this[_reading] = false - if (er) - this[_onerror](er) - else if (this[_handleChunk](br, buf)) - this[_read]() - } - - [_close] () { - if (this[_autoClose] && typeof this[_fd] === 'number') { - const fd = this[_fd] - this[_fd] = null - fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')) - } - } - - [_onerror] (er) { - this[_reading] = true - this[_close]() - this.emit('error', er) - } - - [_handleChunk] (br, buf) { - let ret = false - // no effect if infinite - this[_remain] -= br - if (br > 0) - ret = super.write(br < buf.length ? 
buf.slice(0, br) : buf) - - if (br === 0 || this[_remain] <= 0) { - ret = false - this[_close]() - super.end() - } - - return ret - } - - emit (ev, data) { - switch (ev) { - case 'prefinish': - case 'finish': - break - - case 'drain': - if (typeof this[_fd] === 'number') - this[_read]() - break - - case 'error': - if (this[_errored]) - return - this[_errored] = true - return super.emit(ev, data) - - default: - return super.emit(ev, data) - } - } -} - -class ReadStreamSync extends ReadStream { - [_open] () { - let threw = true - try { - this[_onopen](null, fs.openSync(this[_path], 'r')) - threw = false - } finally { - if (threw) - this[_close]() - } - } - - [_read] () { - let threw = true - try { - if (!this[_reading]) { - this[_reading] = true - do { - const buf = this[_makeBuf]() - /* istanbul ignore next */ - const br = buf.length === 0 ? 0 - : fs.readSync(this[_fd], buf, 0, buf.length, null) - if (!this[_handleChunk](br, buf)) - break - } while (true) - this[_reading] = false - } - threw = false - } finally { - if (threw) - this[_close]() - } - } - - [_close] () { - if (this[_autoClose] && typeof this[_fd] === 'number') { - const fd = this[_fd] - this[_fd] = null - fs.closeSync(fd) - this.emit('close') - } - } -} - -class WriteStream extends EE { - constructor (path, opt) { - opt = opt || {} - super(opt) - this.readable = false - this.writable = true - this[_errored] = false - this[_writing] = false - this[_ended] = false - this[_needDrain] = false - this[_queue] = [] - this[_path] = path - this[_fd] = typeof opt.fd === 'number' ? opt.fd : null - this[_mode] = opt.mode === undefined ? 0o666 : opt.mode - this[_pos] = typeof opt.start === 'number' ? opt.start : null - this[_autoClose] = typeof opt.autoClose === 'boolean' ? - opt.autoClose : true - - // truncating makes no sense when writing into the middle - const defaultFlag = this[_pos] !== null ? 'r+' : 'w' - this[_defaultFlag] = opt.flags === undefined - this[_flags] = this[_defaultFlag] ? 
defaultFlag : opt.flags - - if (this[_fd] === null) - this[_open]() - } - - emit (ev, data) { - if (ev === 'error') { - if (this[_errored]) - return - this[_errored] = true - } - return super.emit(ev, data) - } - - - get fd () { return this[_fd] } - get path () { return this[_path] } - - [_onerror] (er) { - this[_close]() - this[_writing] = true - this.emit('error', er) - } - - [_open] () { - fs.open(this[_path], this[_flags], this[_mode], - (er, fd) => this[_onopen](er, fd)) - } - - [_onopen] (er, fd) { - if (this[_defaultFlag] && - this[_flags] === 'r+' && - er && er.code === 'ENOENT') { - this[_flags] = 'w' - this[_open]() - } else if (er) - this[_onerror](er) - else { - this[_fd] = fd - this.emit('open', fd) - this[_flush]() - } - } - - end (buf, enc) { - if (buf) - this.write(buf, enc) - - this[_ended] = true - - // synthetic after-write logic, where drain/finish live - if (!this[_writing] && !this[_queue].length && - typeof this[_fd] === 'number') - this[_onwrite](null, 0) - return this - } - - write (buf, enc) { - if (typeof buf === 'string') - buf = Buffer.from(buf, enc) - - if (this[_ended]) { - this.emit('error', new Error('write() after end()')) - return false - } - - if (this[_fd] === null || this[_writing] || this[_queue].length) { - this[_queue].push(buf) - this[_needDrain] = true - return false - } - - this[_writing] = true - this[_write](buf) - return true - } - - [_write] (buf) { - fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => - this[_onwrite](er, bw)) - } - - [_onwrite] (er, bw) { - if (er) - this[_onerror](er) - else { - if (this[_pos] !== null) - this[_pos] += bw - if (this[_queue].length) - this[_flush]() - else { - this[_writing] = false - - if (this[_ended] && !this[_finished]) { - this[_finished] = true - this[_close]() - this.emit('finish') - } else if (this[_needDrain]) { - this[_needDrain] = false - this.emit('drain') - } - } - } - } - - [_flush] () { - if (this[_queue].length === 0) { - if (this[_ended]) - this[_onwrite](null, 0) - } else if (this[_queue].length === 1) - this[_write](this[_queue].pop()) - else { - const iovec = this[_queue] - this[_queue] = [] - writev(this[_fd], iovec, this[_pos], - (er, bw) => this[_onwrite](er, bw)) - } - } - - [_close] () { - if (this[_autoClose] && typeof this[_fd] === 'number') { - const fd = this[_fd] - this[_fd] = null - fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')) - } - } -} - -class WriteStreamSync extends WriteStream { - [_open] () { - let fd - // only wrap in a try{} block if we know we'll retry, to avoid - // the rethrow obscuring the error's source frame in most cases. 
- if (this[_defaultFlag] && this[_flags] === 'r+') { - try { - fd = fs.openSync(this[_path], this[_flags], this[_mode]) - } catch (er) { - if (er.code === 'ENOENT') { - this[_flags] = 'w' - return this[_open]() - } else - throw er - } - } else - fd = fs.openSync(this[_path], this[_flags], this[_mode]) - - this[_onopen](null, fd) - } - - [_close] () { - if (this[_autoClose] && typeof this[_fd] === 'number') { - const fd = this[_fd] - this[_fd] = null - fs.closeSync(fd) - this.emit('close') - } - } - - [_write] (buf) { - // throw the original, but try to close if it fails - let threw = true - try { - this[_onwrite](null, - fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos])) - threw = false - } finally { - if (threw) - try { this[_close]() } catch (_) {} - } - } -} - -exports.ReadStream = ReadStream -exports.ReadStreamSync = ReadStreamSync - -exports.WriteStream = WriteStream -exports.WriteStreamSync = WriteStreamSync diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/fs-minipass/package.json b/node_modules/npm/node_modules/node-gyp/node_modules/fs-minipass/package.json deleted file mode 100644 index 2f2436c..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/fs-minipass/package.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "name": "fs-minipass", - "version": "2.1.0", - "main": "index.js", - "scripts": { - "test": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "postpublish": "git push origin --follow-tags" - }, - "keywords": [], - "author": "Isaac Z. Schlueter (http://blog.izs.me/)", - "license": "ISC", - "repository": { - "type": "git", - "url": "git+https://github.com/npm/fs-minipass.git" - }, - "bugs": { - "url": "https://github.com/npm/fs-minipass/issues" - }, - "homepage": "https://github.com/npm/fs-minipass#readme", - "description": "fs read and write streams based on minipass", - "dependencies": { - "minipass": "^3.0.0" - }, - "devDependencies": { - "mutate-fs": "^2.0.1", - "tap": "^14.6.4" - }, - "files": [ - "index.js" - ], - "tap": { - "check-coverage": true - }, - "engines": { - "node": ">= 8" - } -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE b/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE deleted file mode 100644 index 1808eb2..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE +++ /dev/null @@ -1,16 +0,0 @@ -ISC License - -Copyright 2017-2022 (c) npm, Inc. - -Permission to use, copy, modify, and/or distribute this software for -any purpose with or without fee is hereby granted, provided that the -above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS -ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE -COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR -CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS -OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE -OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE -USE OR PERFORMANCE OF THIS SOFTWARE. 
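A hypothetical usage sketch for the fs-minipass streams deleted above (the path, offset, and payload are made up):

    const { ReadStream, WriteStream } = require('fs-minipass')

    // A numeric start offset selects the non-truncating 'r+' default flag;
    // if the file does not exist yet, WriteStream falls back to 'w' on ENOENT.
    const out = new WriteStream('/tmp/example.bin', { start: 1024 })
    out.write(Buffer.from('patched'))
    out.end()

    out.on('close', () => {
      // ReadStream pushes chunks through minipass 'data' events and
      // emits 'close' once the descriptor is auto-closed.
      const inp = new ReadStream('/tmp/example.bin')
      inp.on('data', chunk => console.log('read %d bytes', chunk.length))
      inp.on('close', () => console.log('done'))
    })
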
diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js b/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js deleted file mode 100644 index dd68492..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js +++ /dev/null @@ -1,214 +0,0 @@ -'use strict' -const LRU = require('lru-cache') -const url = require('url') -const isLambda = require('is-lambda') -const dns = require('./dns.js') - -const AGENT_CACHE = new LRU({ max: 50 }) -const HttpAgent = require('agentkeepalive') -const HttpsAgent = HttpAgent.HttpsAgent - -module.exports = getAgent - -const getAgentTimeout = timeout => - typeof timeout !== 'number' || !timeout ? 0 : timeout + 1 - -const getMaxSockets = maxSockets => maxSockets || 15 - -function getAgent (uri, opts) { - const parsedUri = new url.URL(typeof uri === 'string' ? uri : uri.url) - const isHttps = parsedUri.protocol === 'https:' - const pxuri = getProxyUri(parsedUri.href, opts) - - // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout - // of zero disables the timeout behavior (OS limits still apply). Else, if - // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that - // the node-fetch-npm timeout will always fire first, giving us more - // consistent errors. - const agentTimeout = getAgentTimeout(opts.timeout) - const agentMaxSockets = getMaxSockets(opts.maxSockets) - - const key = [ - `https:${isHttps}`, - pxuri - ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}` - : '>no-proxy<', - `local-address:${opts.localAddress || '>no-local-address<'}`, - `strict-ssl:${isHttps ? opts.rejectUnauthorized : '>no-strict-ssl<'}`, - `ca:${(isHttps && opts.ca) || '>no-ca<'}`, - `cert:${(isHttps && opts.cert) || '>no-cert<'}`, - `key:${(isHttps && opts.key) || '>no-key<'}`, - `timeout:${agentTimeout}`, - `maxSockets:${agentMaxSockets}`, - ].join(':') - - if (opts.agent != null) { // `agent: false` has special behavior! - return opts.agent - } - - // keep alive in AWS lambda makes no sense - const lambdaAgent = !isLambda ? null - : isHttps ? require('https').globalAgent - : require('http').globalAgent - - if (isLambda && !pxuri) { - return lambdaAgent - } - - if (AGENT_CACHE.peek(key)) { - return AGENT_CACHE.get(key) - } - - if (pxuri) { - const pxopts = isLambda ? { - ...opts, - agent: lambdaAgent, - } : opts - const proxy = getProxy(pxuri, pxopts, isHttps) - AGENT_CACHE.set(key, proxy) - return proxy - } - - const agent = isHttps ? 
new HttpsAgent({ - maxSockets: agentMaxSockets, - ca: opts.ca, - cert: opts.cert, - key: opts.key, - localAddress: opts.localAddress, - rejectUnauthorized: opts.rejectUnauthorized, - timeout: agentTimeout, - freeSocketTimeout: 15000, - lookup: dns.getLookup(opts.dns), - }) : new HttpAgent({ - maxSockets: agentMaxSockets, - localAddress: opts.localAddress, - timeout: agentTimeout, - freeSocketTimeout: 15000, - lookup: dns.getLookup(opts.dns), - }) - AGENT_CACHE.set(key, agent) - return agent -} - -function checkNoProxy (uri, opts) { - const host = new url.URL(uri).hostname.split('.').reverse() - let noproxy = (opts.noProxy || getProcessEnv('no_proxy')) - if (typeof noproxy === 'string') { - noproxy = noproxy.split(',').map(n => n.trim()) - } - - return noproxy && noproxy.some(no => { - const noParts = no.split('.').filter(x => x).reverse() - if (!noParts.length) { - return false - } - for (let i = 0; i < noParts.length; i++) { - if (host[i] !== noParts[i]) { - return false - } - } - return true - }) -} - -module.exports.getProcessEnv = getProcessEnv - -function getProcessEnv (env) { - if (!env) { - return - } - - let value - - if (Array.isArray(env)) { - for (const e of env) { - value = process.env[e] || - process.env[e.toUpperCase()] || - process.env[e.toLowerCase()] - if (typeof value !== 'undefined') { - break - } - } - } - - if (typeof env === 'string') { - value = process.env[env] || - process.env[env.toUpperCase()] || - process.env[env.toLowerCase()] - } - - return value -} - -module.exports.getProxyUri = getProxyUri -function getProxyUri (uri, opts) { - const protocol = new url.URL(uri).protocol - - const proxy = opts.proxy || - ( - protocol === 'https:' && - getProcessEnv('https_proxy') - ) || - ( - protocol === 'http:' && - getProcessEnv(['https_proxy', 'http_proxy', 'proxy']) - ) - if (!proxy) { - return null - } - - const parsedProxy = (typeof proxy === 'string') ? new url.URL(proxy) : proxy - - return !checkNoProxy(uri, opts) && parsedProxy -} - -const getAuth = u => - u.username && u.password ? decodeURIComponent(`${u.username}:${u.password}`) - : u.username ? 
decodeURIComponent(u.username) - : null - -const getPath = u => u.pathname + u.search + u.hash - -const HttpProxyAgent = require('http-proxy-agent') -const HttpsProxyAgent = require('https-proxy-agent') -const { SocksProxyAgent } = require('socks-proxy-agent') -module.exports.getProxy = getProxy -function getProxy (proxyUrl, opts, isHttps) { - // our current proxy agents do not support an overridden dns lookup method, so will not - // benefit from the dns cache - const popts = { - host: proxyUrl.hostname, - port: proxyUrl.port, - protocol: proxyUrl.protocol, - path: getPath(proxyUrl), - auth: getAuth(proxyUrl), - ca: opts.ca, - cert: opts.cert, - key: opts.key, - timeout: getAgentTimeout(opts.timeout), - localAddress: opts.localAddress, - maxSockets: getMaxSockets(opts.maxSockets), - rejectUnauthorized: opts.rejectUnauthorized, - } - - if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') { - if (!isHttps) { - return new HttpProxyAgent(popts) - } else { - return new HttpsProxyAgent(popts) - } - } else if (proxyUrl.protocol.startsWith('socks')) { - // socks-proxy-agent uses hostname not host - popts.hostname = popts.host - delete popts.host - return new SocksProxyAgent(popts) - } else { - throw Object.assign( - new Error(`unsupported proxy protocol: '${proxyUrl.protocol}'`), - { - code: 'EUNSUPPORTEDPROXY', - url: proxyUrl.href, - } - ) - } -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js deleted file mode 100644 index dba89d7..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js +++ /dev/null @@ -1,444 +0,0 @@ -const { Request, Response } = require('minipass-fetch') -const Minipass = require('minipass') -const MinipassFlush = require('minipass-flush') -const cacache = require('cacache') -const url = require('url') - -const CachingMinipassPipeline = require('../pipeline.js') -const CachePolicy = require('./policy.js') -const cacheKey = require('./key.js') -const remote = require('../remote.js') - -const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) - -// allow list for request headers that will be written to the cache index -// note: we will also store any request headers -// that are named in a response's vary header -const KEEP_REQUEST_HEADERS = [ - 'accept-charset', - 'accept-encoding', - 'accept-language', - 'accept', - 'cache-control', -] - -// allow list for response headers that will be written to the cache index -// note: we must not store the real response's age header, or when we load -// a cache policy based on the metadata it will think the cached response -// is always stale -const KEEP_RESPONSE_HEADERS = [ - 'cache-control', - 'content-encoding', - 'content-language', - 'content-type', - 'date', - 'etag', - 'expires', - 'last-modified', - 'link', - 'location', - 'pragma', - 'vary', -] - -// return an object containing all metadata to be written to the index -const getMetadata = (request, response, options) => { - const metadata = { - time: Date.now(), - url: request.url, - reqHeaders: {}, - resHeaders: {}, - - // options on which we must match the request and vary the response - options: { - compress: options.compress != null ? 
options.compress : request.compress, - }, - } - - // only save the status if it's not a 200 or 304 - if (response.status !== 200 && response.status !== 304) { - metadata.status = response.status - } - - for (const name of KEEP_REQUEST_HEADERS) { - if (request.headers.has(name)) { - metadata.reqHeaders[name] = request.headers.get(name) - } - } - - // if the request's host header differs from the host in the url - // we need to keep it, otherwise it's just noise and we ignore it - const host = request.headers.get('host') - const parsedUrl = new url.URL(request.url) - if (host && parsedUrl.host !== host) { - metadata.reqHeaders.host = host - } - - // if the response has a vary header, make sure - // we store the relevant request headers too - if (response.headers.has('vary')) { - const vary = response.headers.get('vary') - // a vary of "*" means every header causes a different response. - // in that scenario, we do not include any additional headers - // as the freshness check will always fail anyway and we don't - // want to bloat the cache indexes - if (vary !== '*') { - // copy any other request headers that will vary the response - const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) - for (const name of varyHeaders) { - if (request.headers.has(name)) { - metadata.reqHeaders[name] = request.headers.get(name) - } - } - } - } - - for (const name of KEEP_RESPONSE_HEADERS) { - if (response.headers.has(name)) { - metadata.resHeaders[name] = response.headers.get(name) - } - } - - return metadata -} - -// symbols used to hide objects that may be lazily evaluated in a getter -const _request = Symbol('request') -const _response = Symbol('response') -const _policy = Symbol('policy') - -class CacheEntry { - constructor ({ entry, request, response, options }) { - if (entry) { - this.key = entry.key - this.entry = entry - // previous versions of this module didn't write an explicit timestamp in - // the metadata, so fall back to the entry's timestamp. we can't use the - // entry timestamp to determine staleness because cacache will update it - // when it verifies its data - this.entry.metadata.time = this.entry.metadata.time || this.entry.time - } else { - this.key = cacheKey(request) - } - - this.options = options - - // these properties are behind getters that lazily evaluate - this[_request] = request - this[_response] = response - this[_policy] = null - } - - // returns a CacheEntry instance that satisfies the given request - // or undefined if no existing entry satisfies - static async find (request, options) { - try { - // compacts the index and returns an array of unique entries - var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { - const entryA = new CacheEntry({ entry: A, options }) - const entryB = new CacheEntry({ entry: B, options }) - return entryA.policy.satisfies(entryB.request) - }, { - validateEntry: (entry) => { - // clean out entries with a buggy content-encoding value - if (entry.metadata && - entry.metadata.resHeaders && - entry.metadata.resHeaders['content-encoding'] === null) { - return false - } - - // if an integrity is null, it needs to have a status specified - if (entry.integrity === null) { - return !!(entry.metadata && entry.metadata.status) - } - - return true - }, - }) - } catch (err) { - // if the compact request fails, ignore the error and return - return - } - - // a cache mode of 'reload' means to behave as though we have no cache - // on the way to the network. 
return undefined to allow cacheFetch to - // create a brand new request no matter what. - if (options.cache === 'reload') { - return - } - - // find the specific entry that satisfies the request - let match - for (const entry of matches) { - const _entry = new CacheEntry({ - entry, - options, - }) - - if (_entry.policy.satisfies(request)) { - match = _entry - break - } - } - - return match - } - - // if the user made a PUT/POST/PATCH then we invalidate our - // cache for the same url by deleting the index entirely - static async invalidate (request, options) { - const key = cacheKey(request) - try { - await cacache.rm.entry(options.cachePath, key, { removeFully: true }) - } catch (err) { - // ignore errors - } - } - - get request () { - if (!this[_request]) { - this[_request] = new Request(this.entry.metadata.url, { - method: 'GET', - headers: this.entry.metadata.reqHeaders, - ...this.entry.metadata.options, - }) - } - - return this[_request] - } - - get response () { - if (!this[_response]) { - this[_response] = new Response(null, { - url: this.entry.metadata.url, - counter: this.options.counter, - status: this.entry.metadata.status || 200, - headers: { - ...this.entry.metadata.resHeaders, - 'content-length': this.entry.size, - }, - }) - } - - return this[_response] - } - - get policy () { - if (!this[_policy]) { - this[_policy] = new CachePolicy({ - entry: this.entry, - request: this.request, - response: this.response, - options: this.options, - }) - } - - return this[_policy] - } - - // wraps the response in a pipeline that stores the data - // in the cache while the user consumes it - async store (status) { - // if we got a status other than 200, 301, or 308, - // or the CachePolicy forbid storage, append the - // cache status header and return it untouched - if ( - this.request.method !== 'GET' || - ![200, 301, 308].includes(this.response.status) || - !this.policy.storable() - ) { - this.response.headers.set('x-local-cache-status', 'skip') - return this.response - } - - const size = this.response.headers.get('content-length') - const cacheOpts = { - algorithms: this.options.algorithms, - metadata: getMetadata(this.request, this.response, this.options), - size, - integrity: this.options.integrity, - integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, - } - - let body = null - // we only set a body if the status is a 200, redirects are - // stored as metadata only - if (this.response.status === 200) { - let cacheWriteResolve, cacheWriteReject - const cacheWritePromise = new Promise((resolve, reject) => { - cacheWriteResolve = resolve - cacheWriteReject = reject - }) - - body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ - flush () { - return cacheWritePromise - }, - })) - // this is always true since if we aren't reusing the one from the remote fetch, we - // are using the one from cacache - body.hasIntegrityEmitter = true - - const onResume = () => { - const tee = new Minipass() - const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) - // re-emit the integrity and size events on our new response body so they can be reused - cacheStream.on('integrity', i => body.emit('integrity', i)) - cacheStream.on('size', s => body.emit('size', s)) - // stick a flag on here so downstream users will know if they can expect integrity events - tee.pipe(cacheStream) - // TODO if the cache write fails, log a warning but return the response anyway - // eslint-disable-next-line promise/catch-or-return - 
cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) - body.unshift(tee) - body.unshift(this.response.body) - } - - body.once('resume', onResume) - body.once('end', () => body.removeListener('resume', onResume)) - } else { - await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) - } - - // note: we do not set the x-local-cache-hash header because we do not know - // the hash value until after the write to the cache completes, which doesn't - // happen until after the response has been sent and it's too late to write - // the header anyway - this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) - this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) - this.response.headers.set('x-local-cache-mode', 'stream') - this.response.headers.set('x-local-cache-status', status) - this.response.headers.set('x-local-cache-time', new Date().toISOString()) - const newResponse = new Response(body, { - url: this.response.url, - status: this.response.status, - headers: this.response.headers, - counter: this.options.counter, - }) - return newResponse - } - - // use the cached data to create a response and return it - async respond (method, options, status) { - let response - if (method === 'HEAD' || [301, 308].includes(this.response.status)) { - // if the request is a HEAD, or the response is a redirect, - // then the metadata in the entry already includes everything - // we need to build a response - response = this.response - } else { - // we're responding with a full cached response, so create a body - // that reads from cacache and attach it to a new Response - const body = new Minipass() - const headers = { ...this.policy.responseHeaders() } - const onResume = () => { - const cacheStream = cacache.get.stream.byDigest( - this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } - ) - cacheStream.on('error', async (err) => { - cacheStream.pause() - if (err.code === 'EINTEGRITY') { - await cacache.rm.content( - this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } - ) - } - if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { - await CacheEntry.invalidate(this.request, this.options) - } - body.emit('error', err) - cacheStream.resume() - }) - // emit the integrity and size events based on our metadata so we're consistent - body.emit('integrity', this.entry.integrity) - body.emit('size', Number(headers['content-length'])) - cacheStream.pipe(body) - } - - body.once('resume', onResume) - body.once('end', () => body.removeListener('resume', onResume)) - response = new Response(body, { - url: this.entry.metadata.url, - counter: options.counter, - status: 200, - headers, - }) - } - - response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) - response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) - response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) - response.headers.set('x-local-cache-mode', 'stream') - response.headers.set('x-local-cache-status', status) - response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) - return response - } - - // use the provided request along with this cache entry to - // revalidate the stored response. 
returns a response, either - // from the cache or from the update - async revalidate (request, options) { - const revalidateRequest = new Request(request, { - headers: this.policy.revalidationHeaders(request), - }) - - try { - // NOTE: be sure to remove the headers property from the - // user supplied options, since we have already defined - // them on the new request object. if they're still in the - // options then those will overwrite the ones from the policy - var response = await remote(revalidateRequest, { - ...options, - headers: undefined, - }) - } catch (err) { - // if the network fetch fails, return the stale - // cached response unless it has a cache-control - // of 'must-revalidate' - if (!this.policy.mustRevalidate) { - return this.respond(request.method, options, 'stale') - } - - throw err - } - - if (this.policy.revalidated(revalidateRequest, response)) { - // we got a 304, write a new index to the cache and respond from cache - const metadata = getMetadata(request, response, options) - // 304 responses do not include headers that are specific to the response data - // since they do not include a body, so we copy values for headers that were - // in the old cache entry to the new one, if the new metadata does not already - // include that header - for (const name of KEEP_RESPONSE_HEADERS) { - if ( - !hasOwnProperty(metadata.resHeaders, name) && - hasOwnProperty(this.entry.metadata.resHeaders, name) - ) { - metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] - } - } - - try { - await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { - size: this.entry.size, - metadata, - }) - } catch (err) { - // if updating the cache index fails, we ignore it and - // respond anyway - } - return this.respond(request.method, options, 'revalidated') - } - - // if we got a modified response, create a new entry based on it - const newEntry = new CacheEntry({ - request, - response, - options, - }) - - // respond with the new entry while writing it to the cache - return newEntry.store('updated') - } -} - -module.exports = CacheEntry diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js deleted file mode 100644 index 67a6657..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js +++ /dev/null @@ -1,11 +0,0 @@ -class NotCachedError extends Error { - constructor (url) { - /* eslint-disable-next-line max-len */ - super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) - this.code = 'ENOTCACHED' - } -} - -module.exports = { - NotCachedError, -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js deleted file mode 100644 index 0de49d2..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js +++ /dev/null @@ -1,49 +0,0 @@ -const { NotCachedError } = require('./errors.js') -const CacheEntry = require('./entry.js') -const remote = require('../remote.js') - -// do whatever is necessary to get a Response and return it -const cacheFetch = async (request, options) => { - // try to find a cached entry that satisfies this request - const entry = await CacheEntry.find(request, options) - if (!entry) { - // no cached result, if the cache mode is 
'only-if-cached' that's a failure - if (options.cache === 'only-if-cached') { - throw new NotCachedError(request.url) - } - - // otherwise, we make a request, store it and return it - const response = await remote(request, options) - const newEntry = new CacheEntry({ request, response, options }) - return newEntry.store('miss') - } - - // we have a cached response that satisfies this request, however if the cache - // mode is 'no-cache' then we send the revalidation request no matter what - if (options.cache === 'no-cache') { - return entry.revalidate(request, options) - } - - // if the cached entry is not stale, or if the cache mode is 'force-cache' or - // 'only-if-cached' we can respond with the cached entry. set the status - // based on the result of needsRevalidation and respond - const _needsRevalidation = entry.policy.needsRevalidation(request) - if (options.cache === 'force-cache' || - options.cache === 'only-if-cached' || - !_needsRevalidation) { - return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit') - } - - // if we got here, the cache entry is stale so revalidate it - return entry.revalidate(request, options) -} - -cacheFetch.invalidate = async (request, options) => { - if (!options.cachePath) { - return - } - - return CacheEntry.invalidate(request, options) -} - -module.exports = cacheFetch diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js deleted file mode 100644 index f7684d5..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js +++ /dev/null @@ -1,17 +0,0 @@ -const { URL, format } = require('url') - -// options passed to url.format() when generating a key -const formatOptions = { - auth: false, - fragment: false, - search: true, - unicode: false, -} - -// returns a string to be used as the cache key for the Request -const cacheKey = (request) => { - const parsed = new URL(request.url) - return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` -} - -module.exports = cacheKey diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js deleted file mode 100644 index ada3c86..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js +++ /dev/null @@ -1,161 +0,0 @@ -const CacheSemantics = require('http-cache-semantics') -const Negotiator = require('negotiator') -const ssri = require('ssri') - -// options passed to http-cache-semantics constructor -const policyOptions = { - shared: false, - ignoreCargoCult: true, -} - -// a fake empty response, used when only testing the -// request for storability -const emptyResponse = { status: 200, headers: {} } - -// returns a plain object representation of the Request -const requestObject = (request) => { - const _obj = { - method: request.method, - url: request.url, - headers: {}, - compress: request.compress, - } - - request.headers.forEach((value, key) => { - _obj.headers[key] = value - }) - - return _obj -} - -// returns a plain object representation of the Response -const responseObject = (response) => { - const _obj = { - status: response.status, - headers: {}, - } - - response.headers.forEach((value, key) => { - _obj.headers[key] = value - }) - - return _obj -} - -class CachePolicy { - constructor ({ entry, request, response, 
options }) { - this.entry = entry - this.request = requestObject(request) - this.response = responseObject(response) - this.options = options - this.policy = new CacheSemantics(this.request, this.response, policyOptions) - - if (this.entry) { - // if we have an entry, copy the timestamp to the _responseTime - // this is necessary because the CacheSemantics constructor forces - // the value to Date.now() which means a policy created from a - // cache entry is likely to always identify itself as stale - this.policy._responseTime = this.entry.metadata.time - } - } - - // static method to quickly determine if a request alone is storable - static storable (request, options) { - // no cachePath means no caching - if (!options.cachePath) { - return false - } - - // user explicitly asked not to cache - if (options.cache === 'no-store') { - return false - } - - // we only cache GET and HEAD requests - if (!['GET', 'HEAD'].includes(request.method)) { - return false - } - - // otherwise, let http-cache-semantics make the decision - // based on the request's headers - const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions) - return policy.storable() - } - - // returns true if the policy satisfies the request - satisfies (request) { - const _req = requestObject(request) - if (this.request.headers.host !== _req.headers.host) { - return false - } - - if (this.request.compress !== _req.compress) { - return false - } - - const negotiatorA = new Negotiator(this.request) - const negotiatorB = new Negotiator(_req) - - if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { - return false - } - - if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { - return false - } - - if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { - return false - } - - if (this.options.integrity) { - return ssri.parse(this.options.integrity).match(this.entry.integrity) - } - - return true - } - - // returns true if the request and response allow caching - storable () { - return this.policy.storable() - } - - // NOTE: this is a hack to avoid parsing the cache-control - // header ourselves, it returns true if the response's - // cache-control contains must-revalidate - get mustRevalidate () { - return !!this.policy._rescc['must-revalidate'] - } - - // returns true if the cached response requires revalidation - // for the given request - needsRevalidation (request) { - const _req = requestObject(request) - // force method to GET because we only cache GETs - // but can serve a HEAD from a cached GET - _req.method = 'GET' - return !this.policy.satisfiesWithoutRevalidation(_req) - } - - responseHeaders () { - return this.policy.responseHeaders() - } - - // returns a new object containing the appropriate headers - // to send a revalidation request - revalidationHeaders (request) { - const _req = requestObject(request) - return this.policy.revalidationHeaders(_req) - } - - // returns true if the request/response was revalidated - // successfully. 
returns false if a new response was received - revalidated (request, response) { - const _req = requestObject(request) - const _res = responseObject(response) - const policy = this.policy.revalidatedPolicy(_req, _res) - return !policy.modified - } -} - -module.exports = CachePolicy diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/dns.js b/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/dns.js deleted file mode 100644 index 13102b5..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/dns.js +++ /dev/null @@ -1,49 +0,0 @@ -const LRUCache = require('lru-cache') -const dns = require('dns') - -const defaultOptions = exports.defaultOptions = { - family: undefined, - hints: dns.ADDRCONFIG, - all: false, - verbatim: undefined, -} - -const lookupCache = exports.lookupCache = new LRUCache({ max: 50 }) - -// this is a factory so that each request can have its own opts (i.e. ttl) -// while still sharing the cache across all requests -exports.getLookup = (dnsOptions) => { - return (hostname, options, callback) => { - if (typeof options === 'function') { - callback = options - options = null - } else if (typeof options === 'number') { - options = { family: options } - } - - options = { ...defaultOptions, ...options } - - const key = JSON.stringify({ - hostname, - family: options.family, - hints: options.hints, - all: options.all, - verbatim: options.verbatim, - }) - - if (lookupCache.has(key)) { - const [address, family] = lookupCache.get(key) - process.nextTick(callback, null, address, family) - return - } - - dnsOptions.lookup(hostname, options, (err, address, family) => { - if (err) { - return callback(err) - } - - lookupCache.set(key, [address, family], { ttl: dnsOptions.ttl }) - return callback(null, address, family) - }) - } -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js deleted file mode 100644 index 233ba67..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js +++ /dev/null @@ -1,118 +0,0 @@ -'use strict' - -const { FetchError, Request, isRedirect } = require('minipass-fetch') -const url = require('url') - -const CachePolicy = require('./cache/policy.js') -const cache = require('./cache/index.js') -const remote = require('./remote.js') - -// given a Request, a Response and user options -// return true if the response is a redirect that -// can be followed. 
we throw errors that will result -// in the fetch being rejected if the redirect is -// possible but invalid for some reason -const canFollowRedirect = (request, response, options) => { - if (!isRedirect(response.status)) { - return false - } - - if (options.redirect === 'manual') { - return false - } - - if (options.redirect === 'error') { - throw new FetchError(`redirect mode is set to error: ${request.url}`, - 'no-redirect', { code: 'ENOREDIRECT' }) - } - - if (!response.headers.has('location')) { - throw new FetchError(`redirect location header missing for: ${request.url}`, - 'no-location', { code: 'EINVALIDREDIRECT' }) - } - - if (request.counter >= request.follow) { - throw new FetchError(`maximum redirect reached at: ${request.url}`, - 'max-redirect', { code: 'EMAXREDIRECT' }) - } - - return true -} - -// given a Request, a Response, and the user's options return an object -// with a new Request and a new options object that will be used for -// following the redirect -const getRedirect = (request, response, options) => { - const _opts = { ...options } - const location = response.headers.get('location') - const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) - // Comment below is used under the following license: - /** - * @license - * Copyright (c) 2010-2012 Mikeal Rogers - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an "AS - * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either - * express or implied. See the License for the specific language - * governing permissions and limitations under the License. - */ - - // Remove authorization if changing hostnames (but not if just - // changing ports or protocols). This matches the behavior of request: - // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 - if (new url.URL(request.url).hostname !== redirectUrl.hostname) { - request.headers.delete('authorization') - request.headers.delete('cookie') - } - - // for POST request with 301/302 response, or any request with 303 response, - // use GET when following redirect - if ( - response.status === 303 || - (request.method === 'POST' && [301, 302].includes(response.status)) - ) { - _opts.method = 'GET' - _opts.body = null - request.headers.delete('content-length') - } - - _opts.headers = {} - request.headers.forEach((value, key) => { - _opts.headers[key] = value - }) - - _opts.counter = ++request.counter - const redirectReq = new Request(url.format(redirectUrl), _opts) - return { - request: redirectReq, - options: _opts, - } -} - -const fetch = async (request, options) => { - const response = CachePolicy.storable(request, options) - ? 
await cache(request, options) - : await remote(request, options) - - // if the request wasn't a GET or HEAD, and the response - // status is between 200 and 399 inclusive, invalidate the - // request url - if (!['GET', 'HEAD'].includes(request.method) && - response.status >= 200 && - response.status <= 399) { - await cache.invalidate(request, options) - } - - if (!canFollowRedirect(request, response, options)) { - return response - } - - const redirect = getRedirect(request, response, options) - return fetch(redirect.request, redirect.options) -} - -module.exports = fetch diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js b/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js deleted file mode 100644 index 2f12e8e..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js +++ /dev/null @@ -1,41 +0,0 @@ -const { FetchError, Headers, Request, Response } = require('minipass-fetch') - -const configureOptions = require('./options.js') -const fetch = require('./fetch.js') - -const makeFetchHappen = (url, opts) => { - const options = configureOptions(opts) - - const request = new Request(url, options) - return fetch(request, options) -} - -makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { - if (typeof defaultUrl === 'object') { - defaultOptions = defaultUrl - defaultUrl = null - } - - const defaultedFetch = (url, options = {}) => { - const finalUrl = url || defaultUrl - const finalOptions = { - ...defaultOptions, - ...options, - headers: { - ...defaultOptions.headers, - ...options.headers, - }, - } - return wrappedFetch(finalUrl, finalOptions) - } - - defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => - makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) - return defaultedFetch -} - -module.exports = makeFetchHappen -module.exports.FetchError = FetchError -module.exports.Headers = Headers -module.exports.Request = Request -module.exports.Response = Response diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js b/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js deleted file mode 100644 index daa9ecd..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js +++ /dev/null @@ -1,52 +0,0 @@ -const dns = require('dns') - -const conditionalHeaders = [ - 'if-modified-since', - 'if-none-match', - 'if-unmodified-since', - 'if-match', - 'if-range', -] - -const configureOptions = (opts) => { - const { strictSSL, ...options } = { ...opts } - options.method = options.method ? 
options.method.toUpperCase() : 'GET' - options.rejectUnauthorized = strictSSL !== false - - if (!options.retry) { - options.retry = { retries: 0 } - } else if (typeof options.retry === 'string') { - const retries = parseInt(options.retry, 10) - if (isFinite(retries)) { - options.retry = { retries } - } else { - options.retry = { retries: 0 } - } - } else if (typeof options.retry === 'number') { - options.retry = { retries: options.retry } - } else { - options.retry = { retries: 0, ...options.retry } - } - - options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } - - options.cache = options.cache || 'default' - if (options.cache === 'default') { - const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { - return conditionalHeaders.includes(name.toLowerCase()) - }) - if (hasConditionalHeader) { - options.cache = 'no-store' - } - } - - // cacheManager is deprecated, but if it's set and - // cachePath is not we should copy it to the new field - if (options.cacheManager && !options.cachePath) { - options.cachePath = options.cacheManager - } - - return options -} - -module.exports = configureOptions diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js deleted file mode 100644 index b1d221b..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict' - -const MinipassPipeline = require('minipass-pipeline') - -class CachingMinipassPipeline extends MinipassPipeline { - #events = [] - #data = new Map() - - constructor (opts, ...streams) { - // CRITICAL: do NOT pass the streams to the call to super(), this will start - // the flow of data and potentially cause the events we need to catch to emit - // before we've finished our own setup. instead we call super() with no args, - // finish our setup, and then push the streams into ourselves to start the - // data flow - super() - this.#events = opts.events - - /* istanbul ignore next - coverage disabled because this is pointless to test here */ - if (streams.length) { - this.push(...streams) - } - } - - on (event, handler) { - if (this.#events.includes(event) && this.#data.has(event)) { - return handler(...this.#data.get(event)) - } - - return super.on(event, handler) - } - - emit (event, ...data) { - if (this.#events.includes(event)) { - this.#data.set(event, data) - } - - return super.emit(event, ...data) - } -} - -module.exports = CachingMinipassPipeline diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js b/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js deleted file mode 100644 index 068c73a..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js +++ /dev/null @@ -1,121 +0,0 @@ -const Minipass = require('minipass') -const fetch = require('minipass-fetch') -const promiseRetry = require('promise-retry') -const ssri = require('ssri') - -const CachingMinipassPipeline = require('./pipeline.js') -const getAgent = require('./agent.js') -const pkg = require('../package.json') - -const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` - -const RETRY_ERRORS = [ - 'ECONNRESET', // remote socket closed on us - 'ECONNREFUSED', // remote host refused to open connection - 'EADDRINUSE', // failed to bind to a local port (proxy?) 
- 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW - 'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive - // Known codes we do NOT retry on: - // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) -] - -const RETRY_TYPES = [ - 'request-timeout', -] - -// make a request directly to the remote source, -// retrying certain classes of errors as well as -// following redirects (through the cache if necessary) -// and verifying response integrity -const remoteFetch = (request, options) => { - const agent = getAgent(request.url, options) - if (!request.headers.has('connection')) { - request.headers.set('connection', agent ? 'keep-alive' : 'close') - } - - if (!request.headers.has('user-agent')) { - request.headers.set('user-agent', USER_AGENT) - } - - // keep our own options since we're overriding the agent - // and the redirect mode - const _opts = { - ...options, - agent, - redirect: 'manual', - } - - return promiseRetry(async (retryHandler, attemptNum) => { - const req = new fetch.Request(request, _opts) - try { - let res = await fetch(req, _opts) - if (_opts.integrity && res.status === 200) { - // we got a 200 response and the user has specified an expected - // integrity value, so wrap the response in an ssri stream to verify it - const integrityStream = ssri.integrityStream({ - algorithms: _opts.algorithms, - integrity: _opts.integrity, - size: _opts.size, - }) - const pipeline = new CachingMinipassPipeline({ - events: ['integrity', 'size'], - }, res.body, integrityStream) - // we also propagate the integrity and size events out to the pipeline so we can use - // this new response body as an integrityEmitter for cacache - integrityStream.on('integrity', i => pipeline.emit('integrity', i)) - integrityStream.on('size', s => pipeline.emit('size', s)) - res = new fetch.Response(pipeline, res) - // set an explicit flag so we know if our response body will emit integrity and size - res.body.hasIntegrityEmitter = true - } - - res.headers.set('x-fetch-attempts', attemptNum) - - // do not retry POST requests, or requests with a streaming body - // do retry requests with a 408, 420, 429 or 500+ status in the response - const isStream = Minipass.isStream(req.body) - const isRetriable = req.method !== 'POST' && - !isStream && - ([408, 420, 429].includes(res.status) || res.status >= 500) - - if (isRetriable) { - if (typeof options.onRetry === 'function') { - options.onRetry(res) - } - - return retryHandler(res) - } - - return res - } catch (err) { - const code = (err.code === 'EPROMISERETRY') - ? 
err.retried.code - : err.code - - // err.retried will be the thing that was thrown from above - // if it's a response, we just got a bad status code and we - // can re-throw to allow the retry - const isRetryError = err.retried instanceof fetch.Response || - (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) - - if (req.method === 'POST' || isRetryError) { - throw err - } - - if (typeof options.onRetry === 'function') { - options.onRetry(err) - } - - return retryHandler(err) - } - }, options.retry).catch((err) => { - // don't reject for http errors, just return them - if (err.status >= 400 && err.type !== 'system') { - return err - } - - throw err - }) -} - -module.exports = remoteFetch diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json b/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json deleted file mode 100644 index fc491d1..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json +++ /dev/null @@ -1,79 +0,0 @@ -{ - "name": "make-fetch-happen", - "version": "10.2.1", - "description": "Opinionated, caching, retrying fetch client", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "test": "tap", - "posttest": "npm run lint", - "eslint": "eslint", - "lint": "eslint \"**/*.js\"", - "lintfix": "npm run lint -- --fix", - "postlint": "template-oss-check", - "snap": "tap", - "template-oss-apply": "template-oss-apply --force" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/make-fetch-happen.git" - }, - "keywords": [ - "http", - "request", - "fetch", - "mean girls", - "caching", - "cache", - "subresource integrity" - ], - "author": "GitHub Inc.", - "license": "ISC", - "dependencies": { - "agentkeepalive": "^4.2.1", - "cacache": "^16.1.0", - "http-cache-semantics": "^4.1.0", - "http-proxy-agent": "^5.0.0", - "https-proxy-agent": "^5.0.0", - "is-lambda": "^1.0.1", - "lru-cache": "^7.7.1", - "minipass": "^3.1.6", - "minipass-collect": "^1.0.2", - "minipass-fetch": "^2.0.3", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "promise-retry": "^2.0.1", - "socks-proxy-agent": "^7.0.0", - "ssri": "^9.0.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.5.0", - "mkdirp": "^1.0.4", - "nock": "^13.2.4", - "rimraf": "^3.0.2", - "safe-buffer": "^5.2.1", - "standard-version": "^9.3.2", - "tap": "^16.0.0" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "tap": { - "color": 1, - "files": "test/*.js", - "check-coverage": true, - "timeout": 60 - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.5.0" - } -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/LICENSE b/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/LICENSE deleted file mode 100644 index 3c3410c..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/LICENSE +++ /dev/null @@ -1,28 +0,0 @@ -The MIT License (MIT) - -Copyright (c) Isaac Z. 
Schlueter and Contributors -Copyright (c) 2016 David Frank - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - ---- - -Note: This is a derivative work based on "node-fetch" by David Frank, -modified and distributed under the terms of the MIT license above. -https://github.com/bitinn/node-fetch diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js b/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js deleted file mode 100644 index b18f643..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict' -class AbortError extends Error { - constructor (message) { - super(message) - this.code = 'FETCH_ABORTED' - this.type = 'aborted' - Error.captureStackTrace(this, this.constructor) - } - - get name () { - return 'AbortError' - } - - // don't allow name to be overridden, but don't throw either - set name (s) {} -} -module.exports = AbortError diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js b/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js deleted file mode 100644 index efe69a3..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js +++ /dev/null @@ -1,97 +0,0 @@ -'use strict' -const Minipass = require('minipass') -const TYPE = Symbol('type') -const BUFFER = Symbol('buffer') - -class Blob { - constructor (blobParts, options) { - this[TYPE] = '' - - const buffers = [] - let size = 0 - - if (blobParts) { - const a = blobParts - const length = Number(a.length) - for (let i = 0; i < length; i++) { - const element = a[i] - const buffer = element instanceof Buffer ? element - : ArrayBuffer.isView(element) - ? Buffer.from(element.buffer, element.byteOffset, element.byteLength) - : element instanceof ArrayBuffer ? Buffer.from(element) - : element instanceof Blob ? element[BUFFER] - : typeof element === 'string' ? 
Buffer.from(element) - : Buffer.from(String(element)) - size += buffer.length - buffers.push(buffer) - } - } - - this[BUFFER] = Buffer.concat(buffers, size) - - const type = options && options.type !== undefined - && String(options.type).toLowerCase() - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type - } - } - - get size () { - return this[BUFFER].length - } - - get type () { - return this[TYPE] - } - - text () { - return Promise.resolve(this[BUFFER].toString()) - } - - arrayBuffer () { - const buf = this[BUFFER] - const off = buf.byteOffset - const len = buf.byteLength - const ab = buf.buffer.slice(off, off + len) - return Promise.resolve(ab) - } - - stream () { - return new Minipass().end(this[BUFFER]) - } - - slice (start, end, type) { - const size = this.size - const relativeStart = start === undefined ? 0 - : start < 0 ? Math.max(size + start, 0) - : Math.min(start, size) - const relativeEnd = end === undefined ? size - : end < 0 ? Math.max(size + end, 0) - : Math.min(end, size) - const span = Math.max(relativeEnd - relativeStart, 0) - - const buffer = this[BUFFER] - const slicedBuffer = buffer.slice( - relativeStart, - relativeStart + span - ) - const blob = new Blob([], { type }) - blob[BUFFER] = slicedBuffer - return blob - } - - get [Symbol.toStringTag] () { - return 'Blob' - } - - static get BUFFER () { - return BUFFER - } -} - -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, -}) - -module.exports = Blob diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js b/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js deleted file mode 100644 index 9d1b45d..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js +++ /dev/null @@ -1,350 +0,0 @@ -'use strict' -const Minipass = require('minipass') -const MinipassSized = require('minipass-sized') - -const Blob = require('./blob.js') -const { BUFFER } = Blob -const FetchError = require('./fetch-error.js') - -// optional dependency on 'encoding' -let convert -try { - convert = require('encoding').convert -} catch (e) { - // defer error until textConverted is called -} - -const INTERNALS = Symbol('Body internals') -const CONSUME_BODY = Symbol('consumeBody') - -class Body { - constructor (bodyArg, options = {}) { - const { size = 0, timeout = 0 } = options - const body = bodyArg === undefined || bodyArg === null ? null - : isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString()) - : isBlob(bodyArg) ? bodyArg - : Buffer.isBuffer(bodyArg) ? bodyArg - : Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]' - ? Buffer.from(bodyArg) - : ArrayBuffer.isView(bodyArg) - ? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength) - : Minipass.isStream(bodyArg) ? bodyArg - : Buffer.from(String(bodyArg)) - - this[INTERNALS] = { - body, - disturbed: false, - error: null, - } - - this.size = size - this.timeout = timeout - - if (Minipass.isStream(body)) { - body.on('error', er => { - const error = er.name === 'AbortError' ? 
er - : new FetchError(`Invalid response while trying to fetch ${ - this.url}: ${er.message}`, 'system', er) - this[INTERNALS].error = error - }) - } - } - - get body () { - return this[INTERNALS].body - } - - get bodyUsed () { - return this[INTERNALS].disturbed - } - - arrayBuffer () { - return this[CONSUME_BODY]().then(buf => - buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)) - } - - blob () { - const ct = this.headers && this.headers.get('content-type') || '' - return this[CONSUME_BODY]().then(buf => Object.assign( - new Blob([], { type: ct.toLowerCase() }), - { [BUFFER]: buf } - )) - } - - async json () { - const buf = await this[CONSUME_BODY]() - try { - return JSON.parse(buf.toString()) - } catch (er) { - throw new FetchError( - `invalid json response body at ${this.url} reason: ${er.message}`, - 'invalid-json' - ) - } - } - - text () { - return this[CONSUME_BODY]().then(buf => buf.toString()) - } - - buffer () { - return this[CONSUME_BODY]() - } - - textConverted () { - return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers)) - } - - [CONSUME_BODY] () { - if (this[INTERNALS].disturbed) { - return Promise.reject(new TypeError(`body used already for: ${ - this.url}`)) - } - - this[INTERNALS].disturbed = true - - if (this[INTERNALS].error) { - return Promise.reject(this[INTERNALS].error) - } - - // body is null - if (this.body === null) { - return Promise.resolve(Buffer.alloc(0)) - } - - if (Buffer.isBuffer(this.body)) { - return Promise.resolve(this.body) - } - - const upstream = isBlob(this.body) ? this.body.stream() : this.body - - /* istanbul ignore if: should never happen */ - if (!Minipass.isStream(upstream)) { - return Promise.resolve(Buffer.alloc(0)) - } - - const stream = this.size && upstream instanceof MinipassSized ? upstream - : !this.size && upstream instanceof Minipass && - !(upstream instanceof MinipassSized) ? upstream - : this.size ? new MinipassSized({ size: this.size }) - : new Minipass() - - // allow timeout on slow response body, but only if the stream is still writable. this - // makes the timeout center on the socket stream from lib/index.js rather than the - // intermediary minipass stream we create to receive the data - const resTimeout = this.timeout && stream.writable ? setTimeout(() => { - stream.emit('error', new FetchError( - `Response timeout while trying to fetch ${ - this.url} (over ${this.timeout}ms)`, 'body-timeout')) - }, this.timeout) : null - - // do not keep the process open just for this timeout, even - // though we expect it'll get cleared eventually. - if (resTimeout && resTimeout.unref) { - resTimeout.unref() - } - - // do the pipe in the promise, because the pipe() can send too much - // data through right away and upset the MP Sized object - return new Promise((resolve, reject) => { - // if the stream is some other kind of stream, then pipe through a MP - // so we can collect it more easily. 
- if (stream !== upstream) { - upstream.on('error', er => stream.emit('error', er)) - upstream.pipe(stream) - } - resolve() - }).then(() => stream.concat()).then(buf => { - clearTimeout(resTimeout) - return buf - }).catch(er => { - clearTimeout(resTimeout) - // request was aborted, reject with this Error - if (er.name === 'AbortError' || er.name === 'FetchError') { - throw er - } else if (er.name === 'RangeError') { - throw new FetchError(`Could not create Buffer from response body for ${ - this.url}: ${er.message}`, 'system', er) - } else { - // other errors, such as incorrect content-encoding or content-length - throw new FetchError(`Invalid response body while trying to fetch ${ - this.url}: ${er.message}`, 'system', er) - } - }) - } - - static clone (instance) { - if (instance.bodyUsed) { - throw new Error('cannot clone body after it is used') - } - - const body = instance.body - - // check that body is a stream and not form-data object - // NB: can't clone the form-data object without having it as a dependency - if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') { - // create a dedicated tee stream so that we don't lose data - // potentially sitting in the body stream's buffer by writing it - // immediately to p1 and not having it for p2. - const tee = new Minipass() - const p1 = new Minipass() - const p2 = new Minipass() - tee.on('error', er => { - p1.emit('error', er) - p2.emit('error', er) - }) - body.on('error', er => tee.emit('error', er)) - tee.pipe(p1) - tee.pipe(p2) - body.pipe(tee) - // set instance body to one fork, return the other - instance[INTERNALS].body = p1 - return p2 - } else { - return instance.body - } - } - - static extractContentType (body) { - return body === null || body === undefined ? null - : typeof body === 'string' ? 'text/plain;charset=UTF-8' - : isURLSearchParams(body) - ? 'application/x-www-form-urlencoded;charset=UTF-8' - : isBlob(body) ? body.type || null - : Buffer.isBuffer(body) ? null - : Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null - : ArrayBuffer.isView(body) ? null - : typeof body.getBoundary === 'function' - ? `multipart/form-data;boundary=${body.getBoundary()}` - : Minipass.isStream(body) ? null - : 'text/plain;charset=UTF-8' - } - - static getTotalBytes (instance) { - const { body } = instance - return (body === null || body === undefined) ? 0 - : isBlob(body) ? body.size - : Buffer.isBuffer(body) ? body.length - : body && typeof body.getLengthSync === 'function' && ( - // detect form data input from form-data module - body._lengthRetrievers && - /* istanbul ignore next */ body._lengthRetrievers.length === 0 || // 1.x - body.hasKnownLength && body.hasKnownLength()) // 2.x - ? body.getLengthSync() - : null - } - - static writeToStream (dest, instance) { - const { body } = instance - - if (body === null || body === undefined) { - dest.end() - } else if (Buffer.isBuffer(body) || typeof body === 'string') { - dest.end(body) - } else { - // body is stream or blob - const stream = isBlob(body) ? body.stream() : body - stream.on('error', er => dest.emit('error', er)).pipe(dest) - } - - return dest - } -} - -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true }, -}) - -const isURLSearchParams = obj => - // Duck-typing as a necessary condition. 
- (typeof obj !== 'object' ||
- typeof obj.append !== 'function' ||
- typeof obj.delete !== 'function' ||
- typeof obj.get !== 'function' ||
- typeof obj.getAll !== 'function' ||
- typeof obj.has !== 'function' ||
- typeof obj.set !== 'function') ? false
- // Brand-checking and more duck-typing as optional condition.
- : obj.constructor.name === 'URLSearchParams' ||
- Object.prototype.toString.call(obj) === '[object URLSearchParams]' ||
- typeof obj.sort === 'function'
-
-const isBlob = obj =>
- typeof obj === 'object' &&
- typeof obj.arrayBuffer === 'function' &&
- typeof obj.type === 'string' &&
- typeof obj.stream === 'function' &&
- typeof obj.constructor === 'function' &&
- typeof obj.constructor.name === 'string' &&
- /^(Blob|File)$/.test(obj.constructor.name) &&
- /^(Blob|File)$/.test(obj[Symbol.toStringTag])
-
-const convertBody = (buffer, headers) => {
- /* istanbul ignore if */
- if (typeof convert !== 'function') {
- throw new Error('The package `encoding` must be installed to use the textConverted() function')
- }
-
- const ct = headers && headers.get('content-type')
- let charset = 'utf-8'
- let res
-
- // header
- if (ct) {
- res = /charset=([^;]*)/i.exec(ct)
- }
-
- // no charset in content type, peek at response body for at most 1024 bytes
- const str = buffer.slice(0, 1024).toString()
-
- // html5
- if (!res && str) {
- res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str)
- }
-
- // html4
- if (!res && str) {
- res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str)
- if (!res) {
- res = /<meta[\s]+?content=(['"])(.+?)\1[\s]+?http-equiv=(['"])content-type\3/i.exec(str)
- if (res) {
- res.pop()
- }
- }
- }
-
- // xml
- if (!res && str) {
- res = /<\?xml.+?encoding=(['"])(.+?)\1/.exec(str)
- }
-
- // found charset
- if (res) {
- charset = res.pop()
-
- // prevent decode issues when sites use incorrect encoding
- // ref: https://hsivonen.fi/encoding-menu/
- if (charset === 'gb2312' || charset === 'gbk') {
- charset = 'gb18030'
- }
- }
-
- // turn raw buffers into a single utf-8 buffer
- return convert(
- buffer,
- 'UTF-8',
- charset
- ).toString()
-}
-
-module.exports = Body
diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/fetch-error.js b/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/fetch-error.js
deleted file mode 100644
--- a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/fetch-error.js
+++ /dev/null
@@ -1,32 +0,0 @@
-'use strict'
-class FetchError extends Error {
- constructor (message, type, systemError) {
- super(message)
- this.code = 'FETCH_ERROR'
-
- // pick up code, expected, path, ...
- if (systemError) {
- // eslint-disable-next-line no-multi-assign
- this.code = this.errno = systemError.code
- this.erroredSysCall = systemError.syscall
- }
-
- // override anything the system error might've clobbered
- this.type = this.code === 'EBADSIZE' && this.found > this.expect
- ? 'max-size' : type
- this.message = message
- Error.captureStackTrace(this, this.constructor)
- }
-
- get name () {
- return 'FetchError'
- }
-
- // don't allow name to be overwritten
- set name (n) {}
-
- get [Symbol.toStringTag] () {
- return 'FetchError'
- }
-}
-module.exports = FetchError
diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js b/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js
deleted file mode 100644
index dd6e854..0000000
--- a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js
+++ /dev/null
@@ -1,267 +0,0 @@
-'use strict'
-const invalidTokenRegex = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/
-const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/
-
-const validateName = name => {
- name = `${name}`
- if (invalidTokenRegex.test(name) || name === '') {
- throw new TypeError(`${name} is not a legal HTTP header name`)
- }
-}
-
-const validateValue = value => {
- value = `${value}`
- if (invalidHeaderCharRegex.test(value)) {
- throw new TypeError(`${value} is not a legal HTTP header value`)
- }
-}
-
-const find = (map, name) => {
- name = name.toLowerCase()
- for (const key in map) {
- if (key.toLowerCase() === name) {
- return key
- }
- }
- return undefined
-}
-
-const MAP = Symbol('map')
-class Headers {
- constructor (init = undefined) {
- this[MAP] = Object.create(null)
- if (init instanceof Headers) {
- const rawHeaders = init.raw()
- const headerNames = Object.keys(rawHeaders)
- for (const headerName of headerNames) {
- for (const value of rawHeaders[headerName]) {
- this.append(headerName, value)
- }
- }
- return
- }
-
- // no-op
- if (init === undefined || init === null) {
- return
- }
-
- if (typeof init === 'object') {
- const method = init[Symbol.iterator]
- if (method !== null && method !== undefined) {
- if (typeof method !== 'function') {
- throw new TypeError('Header pairs must be iterable')
- }
-
- // sequence<sequence<ByteString>>
- // Note: per spec we have to first exhaust the lists then process them
- const pairs = []
- for (const pair of init) {
- if (typeof pair !== 'object' ||
- typeof pair[Symbol.iterator] !== 'function')
{
- throw new TypeError('Each header pair must be iterable')
- }
- const arrPair = Array.from(pair)
- if (arrPair.length !== 2) {
- throw new TypeError('Each header pair must be a name/value tuple')
- }
- pairs.push(arrPair)
- }
-
- for (const pair of pairs) {
- this.append(pair[0], pair[1])
- }
- } else {
- // record<ByteString, ByteString>
- for (const key of Object.keys(init)) {
- this.append(key, init[key])
- }
- }
- } else {
- throw new TypeError('Provided initializer must be an object')
- }
- }
-
- get (name) {
- name = `${name}`
- validateName(name)
- const key = find(this[MAP], name)
- if (key === undefined) {
- return null
- }
-
- return this[MAP][key].join(', ')
- }
-
- forEach (callback, thisArg = undefined) {
- let pairs = getHeaders(this)
- for (let i = 0; i < pairs.length; i++) {
- const [name, value] = pairs[i]
- callback.call(thisArg, value, name, this)
- // refresh in case the callback added more headers
- pairs = getHeaders(this)
- }
- }
-
- set (name, value) {
- name = `${name}`
- value = `${value}`
- validateName(name)
- validateValue(value)
- const key = find(this[MAP], name)
- this[MAP][key !== undefined ? key : name] = [value]
- }
-
- append (name, value) {
- name = `${name}`
- value = `${value}`
- validateName(name)
- validateValue(value)
- const key = find(this[MAP], name)
- if (key !== undefined) {
- this[MAP][key].push(value)
- } else {
- this[MAP][name] = [value]
- }
- }
-
- has (name) {
- name = `${name}`
- validateName(name)
- return find(this[MAP], name) !== undefined
- }
-
- delete (name) {
- name = `${name}`
- validateName(name)
- const key = find(this[MAP], name)
- if (key !== undefined) {
- delete this[MAP][key]
- }
- }
-
- raw () {
- return this[MAP]
- }
-
- keys () {
- return new HeadersIterator(this, 'key')
- }
-
- values () {
- return new HeadersIterator(this, 'value')
- }
-
- [Symbol.iterator] () {
- return new HeadersIterator(this, 'key+value')
- }
-
- entries () {
- return new HeadersIterator(this, 'key+value')
- }
-
- get [Symbol.toStringTag] () {
- return 'Headers'
- }
-
- static exportNodeCompatibleHeaders (headers) {
- const obj = Object.assign(Object.create(null), headers[MAP])
-
- // http.request() only supports string as Host header. This hack makes
- // specifying custom Host header possible.
- const hostHeaderKey = find(headers[MAP], 'Host')
- if (hostHeaderKey !== undefined) {
- obj[hostHeaderKey] = obj[hostHeaderKey][0]
- }
-
- return obj
- }
-
- static createHeadersLenient (obj) {
- const headers = new Headers()
- for (const name of Object.keys(obj)) {
- if (invalidTokenRegex.test(name)) {
- continue
- }
-
- if (Array.isArray(obj[name])) {
- for (const val of obj[name]) {
- if (invalidHeaderCharRegex.test(val)) {
- continue
- }
-
- if (headers[MAP][name] === undefined) {
- headers[MAP][name] = [val]
- } else {
- headers[MAP][name].push(val)
- }
- }
- } else if (!invalidHeaderCharRegex.test(obj[name])) {
- headers[MAP][name] = [obj[name]]
- }
- }
- return headers
- }
-}
-
-Object.defineProperties(Headers.prototype, {
- get: { enumerable: true },
- forEach: { enumerable: true },
- set: { enumerable: true },
- append: { enumerable: true },
- has: { enumerable: true },
- delete: { enumerable: true },
- keys: { enumerable: true },
- values: { enumerable: true },
- entries: { enumerable: true },
-})
-
-const getHeaders = (headers, kind = 'key+value') =>
- Object.keys(headers[MAP]).sort().map(
- kind === 'key' ? k => k.toLowerCase()
- : kind === 'value' ?
k => headers[MAP][k].join(', ') - : k => [k.toLowerCase(), headers[MAP][k].join(', ')] - ) - -const INTERNAL = Symbol('internal') - -class HeadersIterator { - constructor (target, kind) { - this[INTERNAL] = { - target, - kind, - index: 0, - } - } - - get [Symbol.toStringTag] () { - return 'HeadersIterator' - } - - next () { - /* istanbul ignore if: should be impossible */ - if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype) { - throw new TypeError('Value of `this` is not a HeadersIterator') - } - - const { target, kind, index } = this[INTERNAL] - const values = getHeaders(target, kind) - const len = values.length - if (index >= len) { - return { - value: undefined, - done: true, - } - } - - this[INTERNAL].index++ - - return { value: values[index], done: false } - } -} - -// manually extend because 'extends' requires a ctor -Object.setPrototypeOf(HeadersIterator.prototype, - Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))) - -module.exports = Headers diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js b/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js deleted file mode 100644 index b1878ac..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js +++ /dev/null @@ -1,365 +0,0 @@ -'use strict' -const { URL } = require('url') -const http = require('http') -const https = require('https') -const zlib = require('minizlib') -const Minipass = require('minipass') - -const Body = require('./body.js') -const { writeToStream, getTotalBytes } = Body -const Response = require('./response.js') -const Headers = require('./headers.js') -const { createHeadersLenient } = Headers -const Request = require('./request.js') -const { getNodeRequestOptions } = Request -const FetchError = require('./fetch-error.js') -const AbortError = require('./abort-error.js') - -// XXX this should really be split up and unit-ized for easier testing -// and better DRY implementation of data/http request aborting -const fetch = async (url, opts) => { - if (/^data:/.test(url)) { - const request = new Request(url, opts) - // delay 1 promise tick so that the consumer can abort right away - return Promise.resolve().then(() => new Promise((resolve, reject) => { - let type, data - try { - const { pathname, search } = new URL(url) - const split = pathname.split(',') - if (split.length < 2) { - throw new Error('invalid data: URI') - } - const mime = split.shift() - const base64 = /;base64$/.test(mime) - type = base64 ? mime.slice(0, -1 * ';base64'.length) : mime - const rawData = decodeURIComponent(split.join(',') + search) - data = base64 ? Buffer.from(rawData, 'base64') : Buffer.from(rawData) - } catch (er) { - return reject(new FetchError(`[${request.method}] ${ - request.url} invalid URL, ${er.message}`, 'system', er)) - } - - const { signal } = request - if (signal && signal.aborted) { - return reject(new AbortError('The user aborted a request.')) - } - - const headers = { 'Content-Length': data.length } - if (type) { - headers['Content-Type'] = type - } - return resolve(new Response(data, { headers })) - })) - } - - return new Promise((resolve, reject) => { - // build request object - const request = new Request(url, opts) - let options - try { - options = getNodeRequestOptions(request) - } catch (er) { - return reject(er) - } - - const send = (options.protocol === 'https:' ? 
https : http).request - const { signal } = request - let response = null - const abort = () => { - const error = new AbortError('The user aborted a request.') - reject(error) - if (Minipass.isStream(request.body) && - typeof request.body.destroy === 'function') { - request.body.destroy(error) - } - if (response && response.body) { - response.body.emit('error', error) - } - } - - if (signal && signal.aborted) { - return abort() - } - - const abortAndFinalize = () => { - abort() - finalize() - } - - const finalize = () => { - req.abort() - if (signal) { - signal.removeEventListener('abort', abortAndFinalize) - } - clearTimeout(reqTimeout) - } - - // send request - const req = send(options) - - if (signal) { - signal.addEventListener('abort', abortAndFinalize) - } - - let reqTimeout = null - if (request.timeout) { - req.once('socket', socket => { - reqTimeout = setTimeout(() => { - reject(new FetchError(`network timeout at: ${ - request.url}`, 'request-timeout')) - finalize() - }, request.timeout) - }) - } - - req.on('error', er => { - // if a 'response' event is emitted before the 'error' event, then by the - // time this handler is run it's too late to reject the Promise for the - // response. instead, we forward the error event to the response stream - // so that the error will surface to the user when they try to consume - // the body. this is done as a side effect of aborting the request except - // for in windows, where we must forward the event manually, otherwise - // there is no longer a ref'd socket attached to the request and the - // stream never ends so the event loop runs out of work and the process - // exits without warning. - // coverage skipped here due to the difficulty in testing - // istanbul ignore next - if (req.res) { - req.res.emit('error', er) - } - reject(new FetchError(`request to ${request.url} failed, reason: ${ - er.message}`, 'system', er)) - finalize() - }) - - req.on('response', res => { - clearTimeout(reqTimeout) - - const headers = createHeadersLenient(res.headers) - - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location') - - // HTTP fetch step 5.3 - const locationURL = location === null ? null - : (new URL(location, request.url)).toString() - - // HTTP fetch step 5.5 - if (request.redirect === 'error') { - reject(new FetchError('uri requested responds with a redirect, ' + - `redirect mode is set to error: ${request.url}`, 'no-redirect')) - finalize() - return - } else if (request.redirect === 'manual') { - // node-fetch-specific step: make manual redirect a bit easier to - // use by setting the Location header value to the resolved URL. 
- if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL) - } catch (err) { - /* istanbul ignore next: nodejs server prevent invalid - response headers, we can't test this through normal - request */ - reject(err) - } - } - } else if (request.redirect === 'follow' && locationURL !== null) { - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${ - request.url}`, 'max-redirect')) - finalize() - return - } - - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && - request.body && - getTotalBytes(request) === null) { - reject(new FetchError( - 'Cannot follow redirect with body being a readable stream', - 'unsupported-redirect' - )) - finalize() - return - } - - // Update host due to redirection - request.headers.set('host', (new URL(locationURL)).host) - - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. - const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - } - - // if the redirect is to a new hostname, strip the authorization and cookie headers - const parsedOriginal = new URL(request.url) - const parsedRedirect = new URL(locationURL) - if (parsedOriginal.hostname !== parsedRedirect.hostname) { - requestOpts.headers.delete('authorization') - requestOpts.headers.delete('cookie') - } - - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || ( - (res.statusCode === 301 || res.statusCode === 302) && - request.method === 'POST' - )) { - requestOpts.method = 'GET' - requestOpts.body = undefined - requestOpts.headers.delete('content-length') - } - - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))) - finalize() - return - } - } // end if(isRedirect) - - // prepare response - res.once('end', () => - signal && signal.removeEventListener('abort', abortAndFinalize)) - - const body = new Minipass() - // if an error occurs, either on the response stream itself, on one of the - // decoder streams, or a response length timeout from the Body class, we - // forward the error through to our internal body stream. If we see an - // error event on that, we call finalize to abort the request and ensure - // we don't leave a socket believing a request is in flight. - // this is difficult to test, so lacks specific coverage. - body.on('error', finalize) - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. - res.on('error', /* istanbul ignore next */ er => body.emit('error', er)) - res.on('data', (chunk) => body.write(chunk)) - res.on('end', () => body.end()) - - const responseOptions = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter, - trailer: new Promise(resolveTrailer => - res.on('end', () => resolveTrailer(createHeadersLenient(res.trailers)))), - } - - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding') - - // HTTP-network fetch step 12.1.1.4: handle content codings - - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. 
no content response (204) - // 5. content not modified response (304) - if (!request.compress || - request.method === 'HEAD' || - codings === null || - res.statusCode === 204 || - res.statusCode === 304) { - response = new Response(body, responseOptions) - resolve(response) - return - } - - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. - const zlibOptions = { - flush: zlib.constants.Z_SYNC_FLUSH, - finishFlush: zlib.constants.Z_SYNC_FLUSH, - } - - // for gzip - if (codings === 'gzip' || codings === 'x-gzip') { - const unzip = new zlib.Gunzip(zlibOptions) - response = new Response( - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. - body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip), - responseOptions - ) - resolve(response) - return - } - - // for deflate - if (codings === 'deflate' || codings === 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new Minipass()) - raw.once('data', chunk => { - // see http://stackoverflow.com/questions/37519828 - const decoder = (chunk[0] & 0x0F) === 0x08 - ? new zlib.Inflate() - : new zlib.InflateRaw() - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. - body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) - response = new Response(decoder, responseOptions) - resolve(response) - }) - return - } - - // for br - if (codings === 'br') { - // ignoring coverage so tests don't have to fake support (or lack of) for brotli - // istanbul ignore next - try { - var decoder = new zlib.BrotliDecompress() - } catch (err) { - reject(err) - finalize() - return - } - // exceedingly rare that the stream would have an error, - // but just in case we proxy it to the stream in use. 
- body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) - response = new Response(decoder, responseOptions) - resolve(response) - return - } - - // otherwise, use response as-is - response = new Response(body, responseOptions) - resolve(response) - }) - - writeToStream(req, request) - }) -} - -module.exports = fetch - -fetch.isRedirect = code => - code === 301 || - code === 302 || - code === 303 || - code === 307 || - code === 308 - -fetch.Headers = Headers -fetch.Request = Request -fetch.Response = Response -fetch.FetchError = FetchError -fetch.AbortError = AbortError diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js b/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js deleted file mode 100644 index e620df6..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js +++ /dev/null @@ -1,281 +0,0 @@ -'use strict' -const { URL } = require('url') -const Minipass = require('minipass') -const Headers = require('./headers.js') -const { exportNodeCompatibleHeaders } = Headers -const Body = require('./body.js') -const { clone, extractContentType, getTotalBytes } = Body - -const version = require('../package.json').version -const defaultUserAgent = - `minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)` - -const INTERNALS = Symbol('Request internals') - -const isRequest = input => - typeof input === 'object' && typeof input[INTERNALS] === 'object' - -const isAbortSignal = signal => { - const proto = ( - signal - && typeof signal === 'object' - && Object.getPrototypeOf(signal) - ) - return !!(proto && proto.constructor.name === 'AbortSignal') -} - -class Request extends Body { - constructor (input, init = {}) { - const parsedURL = isRequest(input) ? new URL(input.url) - : input && input.href ? new URL(input.href) - : new URL(`${input}`) - - if (isRequest(input)) { - init = { ...input[INTERNALS], ...init } - } else if (!input || typeof input === 'string') { - input = {} - } - - const method = (init.method || input.method || 'GET').toUpperCase() - const isGETHEAD = method === 'GET' || method === 'HEAD' - - if ((init.body !== null && init.body !== undefined || - isRequest(input) && input.body !== null) && isGETHEAD) { - throw new TypeError('Request with GET/HEAD method cannot have body') - } - - const inputBody = init.body !== null && init.body !== undefined ? init.body - : isRequest(input) && input.body !== null ? clone(input) - : null - - super(inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0, - }) - - const headers = new Headers(init.headers || input.headers || {}) - - if (inputBody !== null && inputBody !== undefined && - !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody) - if (contentType) { - headers.append('Content-Type', contentType) - } - } - - const signal = 'signal' in init ? 
init.signal - : null - - if (signal !== null && signal !== undefined && !isAbortSignal(signal)) { - throw new TypeError('Expected signal must be an instanceof AbortSignal') - } - - // TLS specific options that are handled by node - const { - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0', - secureOptions, - secureProtocol, - servername, - sessionIdContext, - } = init - - this[INTERNALS] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal, - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized, - secureOptions, - secureProtocol, - servername, - sessionIdContext, - } - - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow - : input.follow !== undefined ? input.follow - : 20 - this.compress = init.compress !== undefined ? init.compress - : input.compress !== undefined ? input.compress - : true - this.counter = init.counter || input.counter || 0 - this.agent = init.agent || input.agent - } - - get method () { - return this[INTERNALS].method - } - - get url () { - return this[INTERNALS].parsedURL.toString() - } - - get headers () { - return this[INTERNALS].headers - } - - get redirect () { - return this[INTERNALS].redirect - } - - get signal () { - return this[INTERNALS].signal - } - - clone () { - return new Request(this) - } - - get [Symbol.toStringTag] () { - return 'Request' - } - - static getNodeRequestOptions (request) { - const parsedURL = request[INTERNALS].parsedURL - const headers = new Headers(request[INTERNALS].headers) - - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*') - } - - // Basic fetch - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported') - } - - if (request.signal && - Minipass.isStream(request.body) && - typeof request.body.destroy !== 'function') { - throw new Error( - 'Cancellation of streamed requests with AbortSignal is not supported') - } - - // HTTP-network-or-cache fetch steps 2.4-2.7 - const contentLengthValue = - (request.body === null || request.body === undefined) && - /^(POST|PUT)$/i.test(request.method) ? '0' - : request.body !== null && request.body !== undefined - ? getTotalBytes(request) - : null - - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue + '') - } - - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', defaultUserAgent) - } - - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate') - } - - const agent = typeof request.agent === 'function' - ? 
request.agent(parsedURL) - : request.agent - - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close') - } - - // TLS specific options that are handled by node - const { - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized, - secureOptions, - secureProtocol, - servername, - sessionIdContext, - } = request[INTERNALS] - - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js - - // we cannot spread parsedURL directly, so we have to read each property one-by-one - // and map them to the equivalent https?.request() method options - const urlProps = { - auth: parsedURL.username || parsedURL.password - ? `${parsedURL.username}:${parsedURL.password}` - : '', - host: parsedURL.host, - hostname: parsedURL.hostname, - path: `${parsedURL.pathname}${parsedURL.search}`, - port: parsedURL.port, - protocol: parsedURL.protocol, - } - - return { - ...urlProps, - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent, - ca, - cert, - ciphers, - clientCertEngine, - crl, - dhparam, - ecdhCurve, - family, - honorCipherOrder, - key, - passphrase, - pfx, - rejectUnauthorized, - secureOptions, - secureProtocol, - servername, - sessionIdContext, - } - } -} - -module.exports = Request - -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true }, -}) diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js b/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js deleted file mode 100644 index 54cb52d..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js +++ /dev/null @@ -1,90 +0,0 @@ -'use strict' -const http = require('http') -const { STATUS_CODES } = http - -const Headers = require('./headers.js') -const Body = require('./body.js') -const { clone, extractContentType } = Body - -const INTERNALS = Symbol('Response internals') - -class Response extends Body { - constructor (body = null, opts = {}) { - super(body, opts) - - const status = opts.status || 200 - const headers = new Headers(opts.headers) - - if (body !== null && body !== undefined && !headers.has('Content-Type')) { - const contentType = extractContentType(body) - if (contentType) { - headers.append('Content-Type', contentType) - } - } - - this[INTERNALS] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter, - trailer: Promise.resolve(opts.trailer || new Headers()), - } - } - - get trailer () { - return this[INTERNALS].trailer - } - - get url () { - return this[INTERNALS].url || '' - } - - get status () { - return this[INTERNALS].status - } - - get ok () { - return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300 - } - - get redirected () { - return this[INTERNALS].counter > 0 - } - - get statusText () { - return this[INTERNALS].statusText - } - - get headers () { - return this[INTERNALS].headers - } - - clone () { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected, - trailer: this.trailer, - }) - } - - get [Symbol.toStringTag] () { - return 'Response' - } -} - -module.exports = Response - 
-Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true }, -}) diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/package.json b/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/package.json deleted file mode 100644 index ada5aed..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/minipass-fetch/package.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "name": "minipass-fetch", - "version": "2.1.2", - "description": "An implementation of window.fetch in Node.js using Minipass streams", - "license": "MIT", - "main": "lib/index.js", - "scripts": { - "test": "tap", - "snap": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "postpublish": "git push origin --follow-tags", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "prepublishOnly": "git push origin --follow-tags", - "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" - }, - "tap": { - "coverage-map": "map.js", - "check-coverage": true - }, - "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.5.0", - "@ungap/url-search-params": "^0.2.2", - "abort-controller": "^3.0.0", - "abortcontroller-polyfill": "~1.7.3", - "encoding": "^0.1.13", - "form-data": "^4.0.0", - "nock": "^13.2.4", - "parted": "^0.1.1", - "string-to-arraybuffer": "^1.0.2", - "tap": "^16.0.0" - }, - "dependencies": { - "minipass": "^3.1.6", - "minipass-sized": "^1.0.3", - "minizlib": "^2.1.2" - }, - "optionalDependencies": { - "encoding": "^0.1.13" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/minipass-fetch.git" - }, - "keywords": [ - "fetch", - "minipass", - "node-fetch", - "window.fetch" - ], - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "author": "GitHub Inc.", - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.5.0" - } -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/minipass/LICENSE b/node_modules/npm/node_modules/node-gyp/node_modules/minipass/LICENSE deleted file mode 100644 index bf1dece..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/minipass/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) 2017-2022 npm, Inc., Isaac Z. Schlueter, and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
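For orientation, the minipass-fetch files removed in the hunks above expose a node-fetch-compatible client. The sketch below is a minimal illustration, not part of this change: the registry URL, the 30-second timeout, and the dist-tags lookup are assumed placeholders, while fetch(), fetch.Headers, res.ok, res.status, res.statusText, res.json(), and FetchError's .type property all appear in the deleted sources.

'use strict'
// Minimal usage sketch for the vendored minipass-fetch client (assumptions noted above).
const fetch = require('minipass-fetch')

const main = async () => {
  // fetch() resolves once response headers arrive; the body is a Minipass stream.
  const res = await fetch('https://registry.npmjs.org/minipass-fetch', {
    headers: new fetch.Headers({ accept: 'application/json' }),
    timeout: 30000, // illustrative node-fetch-style timeout, in milliseconds
  })
  if (!res.ok) {
    throw new Error(`unexpected status: ${res.status} ${res.statusText}`)
  }
  // json() consumes the body; reading it a second time rejects with "body used already".
  const packument = await res.json()
  console.log(packument['dist-tags']) // e.g. { latest: '2.1.2' } (illustrative output)
}

main().catch(er => {
  // FetchError instances carry a .type such as 'request-timeout' or 'max-redirect'.
  console.error(er.type || er.name, er.message)
  process.exitCode = 1
})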
diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/minipass/index.js b/node_modules/npm/node_modules/node-gyp/node_modules/minipass/index.js deleted file mode 100644 index e8797aa..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/minipass/index.js +++ /dev/null @@ -1,649 +0,0 @@ -'use strict' -const proc = typeof process === 'object' && process ? process : { - stdout: null, - stderr: null, -} -const EE = require('events') -const Stream = require('stream') -const SD = require('string_decoder').StringDecoder - -const EOF = Symbol('EOF') -const MAYBE_EMIT_END = Symbol('maybeEmitEnd') -const EMITTED_END = Symbol('emittedEnd') -const EMITTING_END = Symbol('emittingEnd') -const EMITTED_ERROR = Symbol('emittedError') -const CLOSED = Symbol('closed') -const READ = Symbol('read') -const FLUSH = Symbol('flush') -const FLUSHCHUNK = Symbol('flushChunk') -const ENCODING = Symbol('encoding') -const DECODER = Symbol('decoder') -const FLOWING = Symbol('flowing') -const PAUSED = Symbol('paused') -const RESUME = Symbol('resume') -const BUFFERLENGTH = Symbol('bufferLength') -const BUFFERPUSH = Symbol('bufferPush') -const BUFFERSHIFT = Symbol('bufferShift') -const OBJECTMODE = Symbol('objectMode') -const DESTROYED = Symbol('destroyed') -const EMITDATA = Symbol('emitData') -const EMITEND = Symbol('emitEnd') -const EMITEND2 = Symbol('emitEnd2') -const ASYNC = Symbol('async') - -const defer = fn => Promise.resolve().then(fn) - -// TODO remove when Node v8 support drops -const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' -const ASYNCITERATOR = doIter && Symbol.asyncIterator - || Symbol('asyncIterator not implemented') -const ITERATOR = doIter && Symbol.iterator - || Symbol('iterator not implemented') - -// events that mean 'the stream is over' -// these are treated specially, and re-emitted -// if they are listened for after emitting. -const isEndish = ev => - ev === 'end' || - ev === 'finish' || - ev === 'prefinish' - -const isArrayBuffer = b => b instanceof ArrayBuffer || - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0 - -const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) - -class Pipe { - constructor (src, dest, opts) { - this.src = src - this.dest = dest - this.opts = opts - this.ondrain = () => src[RESUME]() - dest.on('drain', this.ondrain) - } - unpipe () { - this.dest.removeListener('drain', this.ondrain) - } - // istanbul ignore next - only here for the prototype - proxyErrors () {} - end () { - this.unpipe() - if (this.opts.end) - this.dest.end() - } -} - -class PipeProxyErrors extends Pipe { - unpipe () { - this.src.removeListener('error', this.proxyErrors) - super.unpipe() - } - constructor (src, dest, opts) { - super(src, dest, opts) - this.proxyErrors = er => dest.emit('error', er) - src.on('error', this.proxyErrors) - } -} - -module.exports = class Minipass extends Stream { - constructor (options) { - super() - this[FLOWING] = false - // whether we're explicitly paused - this[PAUSED] = false - this.pipes = [] - this.buffer = [] - this[OBJECTMODE] = options && options.objectMode || false - if (this[OBJECTMODE]) - this[ENCODING] = null - else - this[ENCODING] = options && options.encoding || null - if (this[ENCODING] === 'buffer') - this[ENCODING] = null - this[ASYNC] = options && !!options.async || false - this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null - this[EOF] = false - this[EMITTED_END] = false - this[EMITTING_END] = false - this[CLOSED] = false - this[EMITTED_ERROR] = null - this.writable = true - this.readable = true - this[BUFFERLENGTH] = 0 - this[DESTROYED] = false - } - - get bufferLength () { return this[BUFFERLENGTH] } - - get encoding () { return this[ENCODING] } - set encoding (enc) { - if (this[OBJECTMODE]) - throw new Error('cannot set encoding in objectMode') - - if (this[ENCODING] && enc !== this[ENCODING] && - (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH])) - throw new Error('cannot change encoding') - - if (this[ENCODING] !== enc) { - this[DECODER] = enc ? new SD(enc) : null - if (this.buffer.length) - this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk)) - } - - this[ENCODING] = enc - } - - setEncoding (enc) { - this.encoding = enc - } - - get objectMode () { return this[OBJECTMODE] } - set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om } - - get ['async'] () { return this[ASYNC] } - set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a } - - write (chunk, encoding, cb) { - if (this[EOF]) - throw new Error('write after end') - - if (this[DESTROYED]) { - this.emit('error', Object.assign( - new Error('Cannot call write after a stream was destroyed'), - { code: 'ERR_STREAM_DESTROYED' } - )) - return true - } - - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' - - if (!encoding) - encoding = 'utf8' - - const fn = this[ASYNC] ? defer : f => f() - - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything else switches us into object mode - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) - else if (isArrayBuffer(chunk)) - chunk = Buffer.from(chunk) - else if (typeof chunk !== 'string') - // use the setter so we throw if we have encoding set - this.objectMode = true - } - - // handle object mode up front, since it's simpler - // this yields better performance, fewer checks later. - if (this[OBJECTMODE]) { - /* istanbul ignore if - maybe impossible? 
*/ - if (this.flowing && this[BUFFERLENGTH] !== 0) - this[FLUSH](true) - - if (this.flowing) - this.emit('data', chunk) - else - this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - - if (cb) - fn(cb) - - return this.flowing - } - - // at this point the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!chunk.length) { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - if (cb) - fn(cb) - return this.flowing - } - - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) { - chunk = Buffer.from(chunk, encoding) - } - - if (Buffer.isBuffer(chunk) && this[ENCODING]) - chunk = this[DECODER].write(chunk) - - // Note: flushing CAN potentially switch us into not-flowing mode - if (this.flowing && this[BUFFERLENGTH] !== 0) - this[FLUSH](true) - - if (this.flowing) - this.emit('data', chunk) - else - this[BUFFERPUSH](chunk) - - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - - if (cb) - fn(cb) - - return this.flowing - } - - read (n) { - if (this[DESTROYED]) - return null - - if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { - this[MAYBE_EMIT_END]() - return null - } - - if (this[OBJECTMODE]) - n = null - - if (this.buffer.length > 1 && !this[OBJECTMODE]) { - if (this.encoding) - this.buffer = [this.buffer.join('')] - else - this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])] - } - - const ret = this[READ](n || null, this.buffer[0]) - this[MAYBE_EMIT_END]() - return ret - } - - [READ] (n, chunk) { - if (n === chunk.length || n === null) - this[BUFFERSHIFT]() - else { - this.buffer[0] = chunk.slice(n) - chunk = chunk.slice(0, n) - this[BUFFERLENGTH] -= n - } - - this.emit('data', chunk) - - if (!this.buffer.length && !this[EOF]) - this.emit('drain') - - return chunk - } - - end (chunk, encoding, cb) { - if (typeof chunk === 'function') - cb = chunk, chunk = null - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' - if (chunk) - this.write(chunk, encoding) - if (cb) - this.once('end', cb) - this[EOF] = true - this.writable = false - - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
- if (this.flowing || !this[PAUSED]) - this[MAYBE_EMIT_END]() - return this - } - - // don't let the internal resume be overwritten - [RESUME] () { - if (this[DESTROYED]) - return - - this[PAUSED] = false - this[FLOWING] = true - this.emit('resume') - if (this.buffer.length) - this[FLUSH]() - else if (this[EOF]) - this[MAYBE_EMIT_END]() - else - this.emit('drain') - } - - resume () { - return this[RESUME]() - } - - pause () { - this[FLOWING] = false - this[PAUSED] = true - } - - get destroyed () { - return this[DESTROYED] - } - - get flowing () { - return this[FLOWING] - } - - get paused () { - return this[PAUSED] - } - - [BUFFERPUSH] (chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1 - else - this[BUFFERLENGTH] += chunk.length - this.buffer.push(chunk) - } - - [BUFFERSHIFT] () { - if (this.buffer.length) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1 - else - this[BUFFERLENGTH] -= this.buffer[0].length - } - return this.buffer.shift() - } - - [FLUSH] (noDrain) { - do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) - - if (!noDrain && !this.buffer.length && !this[EOF]) - this.emit('drain') - } - - [FLUSHCHUNK] (chunk) { - return chunk ? (this.emit('data', chunk), this.flowing) : false - } - - pipe (dest, opts) { - if (this[DESTROYED]) - return - - const ended = this[EMITTED_END] - opts = opts || {} - if (dest === proc.stdout || dest === proc.stderr) - opts.end = false - else - opts.end = opts.end !== false - opts.proxyErrors = !!opts.proxyErrors - - // piping an ended stream ends immediately - if (ended) { - if (opts.end) - dest.end() - } else { - this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts) - : new PipeProxyErrors(this, dest, opts)) - if (this[ASYNC]) - defer(() => this[RESUME]()) - else - this[RESUME]() - } - - return dest - } - - unpipe (dest) { - const p = this.pipes.find(p => p.dest === dest) - if (p) { - this.pipes.splice(this.pipes.indexOf(p), 1) - p.unpipe() - } - } - - addListener (ev, fn) { - return this.on(ev, fn) - } - - on (ev, fn) { - const ret = super.on(ev, fn) - if (ev === 'data' && !this.pipes.length && !this.flowing) - this[RESUME]() - else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) - super.emit('readable') - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev) - this.removeAllListeners(ev) - } else if (ev === 'error' && this[EMITTED_ERROR]) { - if (this[ASYNC]) - defer(() => fn.call(this, this[EMITTED_ERROR])) - else - fn.call(this, this[EMITTED_ERROR]) - } - return ret - } - - get emittedEnd () { - return this[EMITTED_END] - } - - [MAYBE_EMIT_END] () { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this.buffer.length === 0 && - this[EOF]) { - this[EMITTING_END] = true - this.emit('end') - this.emit('prefinish') - this.emit('finish') - if (this[CLOSED]) - this.emit('close') - this[EMITTING_END] = false - } - } - - emit (ev, data, ...extra) { - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) - return - else if (ev === 'data') { - return !data ? false - : this[ASYNC] ? 
defer(() => this[EMITDATA](data)) - : this[EMITDATA](data) - } else if (ev === 'end') { - return this[EMITEND]() - } else if (ev === 'close') { - this[CLOSED] = true - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return - const ret = super.emit('close') - this.removeAllListeners('close') - return ret - } else if (ev === 'error') { - this[EMITTED_ERROR] = data - const ret = super.emit('error', data) - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'resume') { - const ret = super.emit('resume') - this[MAYBE_EMIT_END]() - return ret - } else if (ev === 'finish' || ev === 'prefinish') { - const ret = super.emit(ev) - this.removeAllListeners(ev) - return ret - } - - // Some other unknown event - const ret = super.emit(ev, data, ...extra) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITDATA] (data) { - for (const p of this.pipes) { - if (p.dest.write(data) === false) - this.pause() - } - const ret = super.emit('data', data) - this[MAYBE_EMIT_END]() - return ret - } - - [EMITEND] () { - if (this[EMITTED_END]) - return - - this[EMITTED_END] = true - this.readable = false - if (this[ASYNC]) - defer(() => this[EMITEND2]()) - else - this[EMITEND2]() - } - - [EMITEND2] () { - if (this[DECODER]) { - const data = this[DECODER].end() - if (data) { - for (const p of this.pipes) { - p.dest.write(data) - } - super.emit('data', data) - } - } - - for (const p of this.pipes) { - p.end() - } - const ret = super.emit('end') - this.removeAllListeners('end') - return ret - } - - // const all = await stream.collect() - collect () { - const buf = [] - if (!this[OBJECTMODE]) - buf.dataLength = 0 - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise() - this.on('data', c => { - buf.push(c) - if (!this[OBJECTMODE]) - buf.dataLength += c.length - }) - return p.then(() => buf) - } - - // const data = await stream.concat() - concat () { - return this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this.collect().then(buf => - this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this[ENCODING] ? 
buf.join('') : Buffer.concat(buf, buf.dataLength)) - } - - // stream.promise().then(() => done, er => emitted error) - promise () { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))) - this.on('error', er => reject(er)) - this.on('end', () => resolve()) - }) - } - - // for await (let chunk of stream) - [ASYNCITERATOR] () { - const next = () => { - const res = this.read() - if (res !== null) - return Promise.resolve({ done: false, value: res }) - - if (this[EOF]) - return Promise.resolve({ done: true }) - - let resolve = null - let reject = null - const onerr = er => { - this.removeListener('data', ondata) - this.removeListener('end', onend) - reject(er) - } - const ondata = value => { - this.removeListener('error', onerr) - this.removeListener('end', onend) - this.pause() - resolve({ value: value, done: !!this[EOF] }) - } - const onend = () => { - this.removeListener('error', onerr) - this.removeListener('data', ondata) - resolve({ done: true }) - } - const ondestroy = () => onerr(new Error('stream destroyed')) - return new Promise((res, rej) => { - reject = rej - resolve = res - this.once(DESTROYED, ondestroy) - this.once('error', onerr) - this.once('end', onend) - this.once('data', ondata) - }) - } - - return { next } - } - - // for (let chunk of stream) - [ITERATOR] () { - const next = () => { - const value = this.read() - const done = value === null - return { value, done } - } - return { next } - } - - destroy (er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er) - else - this.emit(DESTROYED) - return this - } - - this[DESTROYED] = true - - // throw away all buffered data, it's never coming out - this.buffer.length = 0 - this[BUFFERLENGTH] = 0 - - if (typeof this.close === 'function' && !this[CLOSED]) - this.close() - - if (er) - this.emit('error', er) - else // if no error to emit, still reject pending promises - this.emit(DESTROYED) - - return this - } - - static isStream (s) { - return !!s && (s instanceof Minipass || s instanceof Stream || - s instanceof EE && ( - typeof s.pipe === 'function' || // readable - (typeof s.write === 'function' && typeof s.end === 'function') // writable - )) - } -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/minipass/package.json b/node_modules/npm/node_modules/node-gyp/node_modules/minipass/package.json deleted file mode 100644 index 548d03f..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/minipass/package.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "name": "minipass", - "version": "3.3.6", - "description": "minimal implementation of a PassThrough stream", - "main": "index.js", - "types": "index.d.ts", - "dependencies": { - "yallist": "^4.0.0" - }, - "devDependencies": { - "@types/node": "^17.0.41", - "end-of-stream": "^1.4.0", - "prettier": "^2.6.2", - "tap": "^16.2.0", - "through2": "^2.0.3", - "ts-node": "^10.8.1", - "typescript": "^4.7.3" - }, - "scripts": { - "test": "tap", - "preversion": "npm test", - "postversion": "npm publish", - "postpublish": "git push origin --follow-tags" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/isaacs/minipass.git" - }, - "keywords": [ - "passthrough", - "stream" - ], - "author": "Isaac Z. 
Schlueter <i@izs.me> (http://blog.izs.me/)",
- "license": "ISC",
- "files": [
- "index.d.ts",
- "index.js"
- ],
- "tap": {
- "check-coverage": true
- },
- "engines": {
- "node": ">=8"
- },
- "prettier": {
- "semi": false,
- "printWidth": 80,
- "tabWidth": 2,
- "useTabs": false,
- "singleQuote": true,
- "jsxSingleQuote": false,
- "bracketSameLine": true,
- "arrowParens": "avoid",
- "endOfLine": "lf"
- }
-}
diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/ssri/LICENSE.md b/node_modules/npm/node_modules/node-gyp/node_modules/ssri/LICENSE.md
deleted file mode 100644
index e335388..0000000
--- a/node_modules/npm/node_modules/node-gyp/node_modules/ssri/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright 2021 (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/ssri/lib/index.js b/node_modules/npm/node_modules/node-gyp/node_modules/ssri/lib/index.js
deleted file mode 100644
index 1443137..0000000
--- a/node_modules/npm/node_modules/node-gyp/node_modules/ssri/lib/index.js
+++ /dev/null
@@ -1,524 +0,0 @@
-'use strict'
-
-const crypto = require('crypto')
-const MiniPass = require('minipass')
-
-const SPEC_ALGORITHMS = ['sha256', 'sha384', 'sha512']
-
-// TODO: this should really be a hardcoded list of algorithms we support,
-// rather than [a-z0-9].
-const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i
-const SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/
-const STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/
-const VCHAR_REGEX = /^[\x21-\x7E]+$/
-
-const defaultOpts = {
- algorithms: ['sha512'],
- error: false,
- options: [],
- pickAlgorithm: getPrioritizedHash,
- sep: ' ',
- single: false,
- strict: false,
-}
-
-const ssriOpts = (opts = {}) => ({ ...defaultOpts, ...opts })
-
-const getOptString = options => !options || !options.length
- ? ''
- : `?${options.join('?')}`
-
-const _onEnd = Symbol('_onEnd')
-const _getOptions = Symbol('_getOptions')
-const _emittedSize = Symbol('_emittedSize')
-const _emittedIntegrity = Symbol('_emittedIntegrity')
-const _emittedVerified = Symbol('_emittedVerified')
-
-class IntegrityStream extends MiniPass {
- constructor (opts) {
- super()
- this.size = 0
- this.opts = opts
-
- // may be overridden later, but set now for class consistency
- this[_getOptions]()
-
- // options used for calculating stream. can't be changed.
- const { algorithms = defaultOpts.algorithms } = opts
- this.algorithms = Array.from(
- new Set(algorithms.concat(this.algorithm ? [this.algorithm] : []))
- )
- this.hashes = this.algorithms.map(crypto.createHash)
- }
-
- [_getOptions] () {
- const {
- integrity,
- size,
- options,
- } = { ...defaultOpts, ...this.opts }
-
- // For verification
- this.sri = integrity ? parse(integrity, this.opts) : null
- this.expectedSize = size
- this.goodSri = this.sri ?
!!Object.keys(this.sri).length : false - this.algorithm = this.goodSri ? this.sri.pickAlgorithm(this.opts) : null - this.digests = this.goodSri ? this.sri[this.algorithm] : null - this.optString = getOptString(options) - } - - on (ev, handler) { - if (ev === 'size' && this[_emittedSize]) { - return handler(this[_emittedSize]) - } - - if (ev === 'integrity' && this[_emittedIntegrity]) { - return handler(this[_emittedIntegrity]) - } - - if (ev === 'verified' && this[_emittedVerified]) { - return handler(this[_emittedVerified]) - } - - return super.on(ev, handler) - } - - emit (ev, data) { - if (ev === 'end') { - this[_onEnd]() - } - return super.emit(ev, data) - } - - write (data) { - this.size += data.length - this.hashes.forEach(h => h.update(data)) - return super.write(data) - } - - [_onEnd] () { - if (!this.goodSri) { - this[_getOptions]() - } - const newSri = parse(this.hashes.map((h, i) => { - return `${this.algorithms[i]}-${h.digest('base64')}${this.optString}` - }).join(' '), this.opts) - // Integrity verification mode - const match = this.goodSri && newSri.match(this.sri, this.opts) - if (typeof this.expectedSize === 'number' && this.size !== this.expectedSize) { - /* eslint-disable-next-line max-len */ - const err = new Error(`stream size mismatch when checking ${this.sri}.\n Wanted: ${this.expectedSize}\n Found: ${this.size}`) - err.code = 'EBADSIZE' - err.found = this.size - err.expected = this.expectedSize - err.sri = this.sri - this.emit('error', err) - } else if (this.sri && !match) { - /* eslint-disable-next-line max-len */ - const err = new Error(`${this.sri} integrity checksum failed when using ${this.algorithm}: wanted ${this.digests} but got ${newSri}. (${this.size} bytes)`) - err.code = 'EINTEGRITY' - err.found = newSri - err.expected = this.digests - err.algorithm = this.algorithm - err.sri = this.sri - this.emit('error', err) - } else { - this[_emittedSize] = this.size - this.emit('size', this.size) - this[_emittedIntegrity] = newSri - this.emit('integrity', newSri) - if (match) { - this[_emittedVerified] = match - this.emit('verified', match) - } - } - } -} - -class Hash { - get isHash () { - return true - } - - constructor (hash, opts) { - opts = ssriOpts(opts) - const strict = !!opts.strict - this.source = hash.trim() - - // set default values so that we make V8 happy to - // always see a familiar object template. - this.digest = '' - this.algorithm = '' - this.options = [] - - // 3.1. Integrity metadata (called "Hash" by ssri) - // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description - const match = this.source.match( - strict - ? STRICT_SRI_REGEX - : SRI_REGEX - ) - if (!match) { - return - } - if (strict && !SPEC_ALGORITHMS.some(a => a === match[1])) { - return - } - this.algorithm = match[1] - this.digest = match[2] - - const rawOpts = match[3] - if (rawOpts) { - this.options = rawOpts.slice(1).split('?') - } - } - - hexDigest () { - return this.digest && Buffer.from(this.digest, 'base64').toString('hex') - } - - toJSON () { - return this.toString() - } - - toString (opts) { - opts = ssriOpts(opts) - if (opts.strict) { - // Strict mode enforces the standard as close to the foot of the - // letter as it can. - if (!( - // The spec has very restricted productions for algorithms. 
- // https://www.w3.org/TR/CSP2/#source-list-syntax - SPEC_ALGORITHMS.some(x => x === this.algorithm) && - // Usually, if someone insists on using a "different" base64, we - // leave it as-is, since there's multiple standards, and the - // specified is not a URL-safe variant. - // https://www.w3.org/TR/CSP2/#base64_value - this.digest.match(BASE64_REGEX) && - // Option syntax is strictly visual chars. - // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression - // https://tools.ietf.org/html/rfc5234#appendix-B.1 - this.options.every(opt => opt.match(VCHAR_REGEX)) - )) { - return '' - } - } - const options = this.options && this.options.length - ? `?${this.options.join('?')}` - : '' - return `${this.algorithm}-${this.digest}${options}` - } -} - -class Integrity { - get isIntegrity () { - return true - } - - toJSON () { - return this.toString() - } - - isEmpty () { - return Object.keys(this).length === 0 - } - - toString (opts) { - opts = ssriOpts(opts) - let sep = opts.sep || ' ' - if (opts.strict) { - // Entries must be separated by whitespace, according to spec. - sep = sep.replace(/\S+/g, ' ') - } - return Object.keys(this).map(k => { - return this[k].map(hash => { - return Hash.prototype.toString.call(hash, opts) - }).filter(x => x.length).join(sep) - }).filter(x => x.length).join(sep) - } - - concat (integrity, opts) { - opts = ssriOpts(opts) - const other = typeof integrity === 'string' - ? integrity - : stringify(integrity, opts) - return parse(`${this.toString(opts)} ${other}`, opts) - } - - hexDigest () { - return parse(this, { single: true }).hexDigest() - } - - // add additional hashes to an integrity value, but prevent - // *changing* an existing integrity hash. - merge (integrity, opts) { - opts = ssriOpts(opts) - const other = parse(integrity, opts) - for (const algo in other) { - if (this[algo]) { - if (!this[algo].find(hash => - other[algo].find(otherhash => - hash.digest === otherhash.digest))) { - throw new Error('hashes do not match, cannot update integrity') - } - } else { - this[algo] = other[algo] - } - } - } - - match (integrity, opts) { - opts = ssriOpts(opts) - const other = parse(integrity, opts) - const algo = other.pickAlgorithm(opts) - return ( - this[algo] && - other[algo] && - this[algo].find(hash => - other[algo].find(otherhash => - hash.digest === otherhash.digest - ) - ) - ) || false - } - - pickAlgorithm (opts) { - opts = ssriOpts(opts) - const pickAlgorithm = opts.pickAlgorithm - const keys = Object.keys(this) - return keys.reduce((acc, algo) => { - return pickAlgorithm(acc, algo) || acc - }) - } -} - -module.exports.parse = parse -function parse (sri, opts) { - if (!sri) { - return null - } - opts = ssriOpts(opts) - if (typeof sri === 'string') { - return _parse(sri, opts) - } else if (sri.algorithm && sri.digest) { - const fullSri = new Integrity() - fullSri[sri.algorithm] = [sri] - return _parse(stringify(fullSri, opts), opts) - } else { - return _parse(stringify(sri, opts), opts) - } -} - -function _parse (integrity, opts) { - // 3.4.3. Parse metadata - // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata - if (opts.single) { - return new Hash(integrity, opts) - } - const hashes = integrity.trim().split(/\s+/).reduce((acc, string) => { - const hash = new Hash(string, opts) - if (hash.algorithm && hash.digest) { - const algo = hash.algorithm - if (!acc[algo]) { - acc[algo] = [] - } - acc[algo].push(hash) - } - return acc - }, new Integrity()) - return hashes.isEmpty() ? 
null : hashes -} - -module.exports.stringify = stringify -function stringify (obj, opts) { - opts = ssriOpts(opts) - if (obj.algorithm && obj.digest) { - return Hash.prototype.toString.call(obj, opts) - } else if (typeof obj === 'string') { - return stringify(parse(obj, opts), opts) - } else { - return Integrity.prototype.toString.call(obj, opts) - } -} - -module.exports.fromHex = fromHex -function fromHex (hexDigest, algorithm, opts) { - opts = ssriOpts(opts) - const optString = getOptString(opts.options) - return parse( - `${algorithm}-${ - Buffer.from(hexDigest, 'hex').toString('base64') - }${optString}`, opts - ) -} - -module.exports.fromData = fromData -function fromData (data, opts) { - opts = ssriOpts(opts) - const algorithms = opts.algorithms - const optString = getOptString(opts.options) - return algorithms.reduce((acc, algo) => { - const digest = crypto.createHash(algo).update(data).digest('base64') - const hash = new Hash( - `${algo}-${digest}${optString}`, - opts - ) - /* istanbul ignore else - it would be VERY strange if the string we - * just calculated with an algo did not have an algo or digest. - */ - if (hash.algorithm && hash.digest) { - const hashAlgo = hash.algorithm - if (!acc[hashAlgo]) { - acc[hashAlgo] = [] - } - acc[hashAlgo].push(hash) - } - return acc - }, new Integrity()) -} - -module.exports.fromStream = fromStream -function fromStream (stream, opts) { - opts = ssriOpts(opts) - const istream = integrityStream(opts) - return new Promise((resolve, reject) => { - stream.pipe(istream) - stream.on('error', reject) - istream.on('error', reject) - let sri - istream.on('integrity', s => { - sri = s - }) - istream.on('end', () => resolve(sri)) - istream.on('data', () => {}) - }) -} - -module.exports.checkData = checkData -function checkData (data, sri, opts) { - opts = ssriOpts(opts) - sri = parse(sri, opts) - if (!sri || !Object.keys(sri).length) { - if (opts.error) { - throw Object.assign( - new Error('No valid integrity hashes to check against'), { - code: 'EINTEGRITY', - } - ) - } else { - return false - } - } - const algorithm = sri.pickAlgorithm(opts) - const digest = crypto.createHash(algorithm).update(data).digest('base64') - const newSri = parse({ algorithm, digest }) - const match = newSri.match(sri, opts) - if (match || !opts.error) { - return match - } else if (typeof opts.size === 'number' && (data.length !== opts.size)) { - /* eslint-disable-next-line max-len */ - const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`) - err.code = 'EBADSIZE' - err.found = data.length - err.expected = opts.size - err.sri = sri - throw err - } else { - /* eslint-disable-next-line max-len */ - const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. 
(${data.length} bytes)`) - err.code = 'EINTEGRITY' - err.found = newSri - err.expected = sri - err.algorithm = algorithm - err.sri = sri - throw err - } -} - -module.exports.checkStream = checkStream -function checkStream (stream, sri, opts) { - opts = ssriOpts(opts) - opts.integrity = sri - sri = parse(sri, opts) - if (!sri || !Object.keys(sri).length) { - return Promise.reject(Object.assign( - new Error('No valid integrity hashes to check against'), { - code: 'EINTEGRITY', - } - )) - } - const checker = integrityStream(opts) - return new Promise((resolve, reject) => { - stream.pipe(checker) - stream.on('error', reject) - checker.on('error', reject) - let verified - checker.on('verified', s => { - verified = s - }) - checker.on('end', () => resolve(verified)) - checker.on('data', () => {}) - }) -} - -module.exports.integrityStream = integrityStream -function integrityStream (opts = {}) { - return new IntegrityStream(opts) -} - -module.exports.create = createIntegrity -function createIntegrity (opts) { - opts = ssriOpts(opts) - const algorithms = opts.algorithms - const optString = getOptString(opts.options) - - const hashes = algorithms.map(crypto.createHash) - - return { - update: function (chunk, enc) { - hashes.forEach(h => h.update(chunk, enc)) - return this - }, - digest: function (enc) { - const integrity = algorithms.reduce((acc, algo) => { - const digest = hashes.shift().digest('base64') - const hash = new Hash( - `${algo}-${digest}${optString}`, - opts - ) - /* istanbul ignore else - it would be VERY strange if the hash we - * just calculated with an algo did not have an algo or digest. - */ - if (hash.algorithm && hash.digest) { - const hashAlgo = hash.algorithm - if (!acc[hashAlgo]) { - acc[hashAlgo] = [] - } - acc[hashAlgo].push(hash) - } - return acc - }, new Integrity()) - - return integrity - }, - } -} - -const NODE_HASHES = new Set(crypto.getHashes()) - -// This is a Best Effort™ at a reasonable priority for hash algos -const DEFAULT_PRIORITY = [ - 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512', - // TODO - it's unclear _which_ of these Node will actually use as its name - // for the algorithm, so we guesswork it based on the OpenSSL names. - 'sha3', - 'sha3-256', 'sha3-384', 'sha3-512', - 'sha3_256', 'sha3_384', 'sha3_512', -].filter(algo => NODE_HASHES.has(algo)) - -function getPrioritizedHash (algo1, algo2) { - /* eslint-disable-next-line max-len */ - return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase()) - ? 
algo1 - : algo2 -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/ssri/package.json b/node_modules/npm/node_modules/node-gyp/node_modules/ssri/package.json deleted file mode 100644 index 91c1f91..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/ssri/package.json +++ /dev/null @@ -1,63 +0,0 @@ -{ - "name": "ssri", - "version": "9.0.1", - "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.", - "main": "lib/index.js", - "files": [ - "bin/", - "lib/" - ], - "scripts": { - "prerelease": "npm t", - "postrelease": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "posttest": "npm run lint", - "test": "tap", - "coverage": "tap", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "preversion": "npm test", - "postversion": "npm publish", - "snap": "tap" - }, - "tap": { - "check-coverage": true - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/ssri.git" - }, - "keywords": [ - "w3c", - "web", - "security", - "integrity", - "checksum", - "hashing", - "subresource integrity", - "sri", - "sri hash", - "sri string", - "sri generator", - "html" - ], - "author": "GitHub Inc.", - "license": "ISC", - "dependencies": { - "minipass": "^3.1.1" - }, - "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "3.5.0", - "tap": "^16.0.1" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.5.0" - } -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/unique-filename/LICENSE b/node_modules/npm/node_modules/node-gyp/node_modules/unique-filename/LICENSE deleted file mode 100644 index 69619c1..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/unique-filename/LICENSE +++ /dev/null @@ -1,5 +0,0 @@ -Copyright npm, Inc - -Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/unique-filename/lib/index.js b/node_modules/npm/node_modules/node-gyp/node_modules/unique-filename/lib/index.js deleted file mode 100644 index d067d2e..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/unique-filename/lib/index.js +++ /dev/null @@ -1,7 +0,0 @@ -var path = require('path') - -var uniqueSlug = require('unique-slug') - -module.exports = function (filepath, prefix, uniq) { - return path.join(filepath, (prefix ? 
prefix + '-' : '') + uniqueSlug(uniq)) -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/unique-filename/package.json b/node_modules/npm/node_modules/node-gyp/node_modules/unique-filename/package.json deleted file mode 100644 index bfdec2c..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/unique-filename/package.json +++ /dev/null @@ -1,48 +0,0 @@ -{ - "name": "unique-filename", - "version": "2.0.1", - "description": "Generate a unique filename for use in temporary directories or caches.", - "main": "lib/index.js", - "scripts": { - "test": "tap", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "snap": "tap", - "posttest": "npm run lint" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/unique-filename.git" - }, - "keywords": [], - "author": "GitHub Inc.", - "license": "ISC", - "bugs": { - "url": "https://github.com/iarna/unique-filename/issues" - }, - "homepage": "https://github.com/iarna/unique-filename", - "devDependencies": { - "@npmcli/eslint-config": "^3.1.0", - "@npmcli/template-oss": "3.5.0", - "tap": "^16.3.0" - }, - "dependencies": { - "unique-slug": "^3.0.0" - }, - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "3.5.0" - } -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/unique-slug/LICENSE b/node_modules/npm/node_modules/node-gyp/node_modules/unique-slug/LICENSE deleted file mode 100644 index 7953647..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/unique-slug/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright npm, Inc - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
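For context on the two bundled helpers this patch removes: unique-filename, deleted just above, simply joins a directory, an optional prefix, and the short hex slug produced by unique-slug, deleted just below. A minimal sketch of that composition, reconstructed from the two deleted lib/index.js files; the temp directory and the 'my-cache' prefix here are illustrative, not taken from this patch:
const os = require('os')
const uniqueFilename = require('unique-filename') // the bundled package removed above
// prefix is optional; when uniq is supplied, unique-slug seeds MurmurHash3 with it,
// so the same uniq always yields the same 8-char hex slug; with no uniq the slug is random
const tmpFile = uniqueFilename(os.tmpdir(), 'my-cache')
console.log(tmpFile) // e.g. /tmp/my-cache-81fe6a29 (slug value illustrative)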
diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/unique-slug/lib/index.js b/node_modules/npm/node_modules/node-gyp/node_modules/unique-slug/lib/index.js deleted file mode 100644 index 1bac84d..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/unique-slug/lib/index.js +++ /dev/null @@ -1,11 +0,0 @@ -'use strict' -var MurmurHash3 = require('imurmurhash') - -module.exports = function (uniq) { - if (uniq) { - var hash = new MurmurHash3(uniq) - return ('00000000' + hash.result().toString(16)).slice(-8) - } else { - return (Math.random().toString(16) + '0000000').slice(2, 10) - } -} diff --git a/node_modules/npm/node_modules/node-gyp/node_modules/unique-slug/package.json b/node_modules/npm/node_modules/node-gyp/node_modules/unique-slug/package.json deleted file mode 100644 index 3194408..0000000 --- a/node_modules/npm/node_modules/node-gyp/node_modules/unique-slug/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "unique-slug", - "version": "3.0.0", - "description": "Generate a unique character string suitible for use in files and URLs.", - "main": "lib/index.js", - "scripts": { - "test": "tap", - "lint": "eslint \"**/*.js\"", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "snap": "tap", - "posttest": "npm run lint" - }, - "keywords": [], - "author": "GitHub Inc.", - "license": "ISC", - "devDependencies": { - "@npmcli/eslint-config": "^3.1.0", - "@npmcli/template-oss": "3.5.0", - "tap": "^16.3.0" - }, - "repository": { - "type": "git", - "url": "https://github.com/npm/unique-slug.git" - }, - "dependencies": { - "imurmurhash": "^0.1.4" - }, - "files": [ - "bin/", - "lib/" - ], - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - }, - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "3.5.0" - } -} diff --git a/node_modules/npm/node_modules/node-gyp/package.json b/node_modules/npm/node_modules/node-gyp/package.json index f95ebea..7e9fb64 100644 --- a/node_modules/npm/node_modules/node-gyp/package.json +++ b/node_modules/npm/node_modules/node-gyp/package.json @@ -11,8 +11,8 @@ "bindings", "gyp" ], - "version": "9.3.1", - "installVersion": 9, + "version": "9.4.0", + "installVersion": 11, "author": "Nathan Rajlich (http://tootallnate.net)", "repository": { "type": "git", @@ -23,9 +23,10 @@ "main": "./lib/node-gyp.js", "dependencies": { "env-paths": "^2.2.0", + "exponential-backoff": "^3.1.1", "glob": "^7.1.4", "graceful-fs": "^4.2.6", - "make-fetch-happen": "^10.0.3", + "make-fetch-happen": "^11.0.3", "nopt": "^6.0.0", "npmlog": "^6.0.0", "rimraf": "^3.0.2", @@ -38,13 +39,13 @@ }, "devDependencies": { "bindings": "^1.5.0", + "mocha": "^10.2.0", "nan": "^2.14.2", "require-inject": "^1.4.4", - "standard": "^14.3.4", - "tap": "^12.7.0" + "standard": "^14.3.4" }, "scripts": { "lint": "standard */*.js test/**/*.js", - "test": "npm run lint && tap --timeout=600 test/test-*" + "test": "npm run lint && mocha --reporter=test/reporter.js test/test-download.js test/test-*" } } diff --git a/node_modules/npm/node_modules/node-gyp/test/fixtures/VS_2022_Community_workload.txt b/node_modules/npm/node_modules/node-gyp/test/fixtures/VS_2022_Community_workload.txt new file mode 100644 index 0000000..7cd20f8 --- /dev/null +++ b/node_modules/npm/node_modules/node-gyp/test/fixtures/VS_2022_Community_workload.txt @@ -0,0 +1,569 @@ +[ + { + "path": "C:\\Program Files\\Microsoft Visual Studio\\2022\\Community", + "version": "17.4.33213.308", + "packages": [ + "Microsoft.VisualStudio.Product.Community", + "Microsoft.VisualStudio.PackageGroup.LiveShare.VSCore", + "Microsoft.VisualStudio.LiveShare.VSCore", + "Microsoft.VisualStudio.Workload.NativeDesktop", + "Microsoft.VisualStudio.Component.VC.ASAN", + "Microsoft.VisualCpp.ASAN.X86", + "Microsoft.VC.14.34.17.4.ASAN.X86.base", + "Microsoft.VC.14.34.17.4.ASAN.X64.base", + "Microsoft.VC.14.34.17.4.ASAN.Headers.base", + "Microsoft.VisualStudio.VC.IDE.Project.Factories", + "Microsoft.VisualStudio.Component.VC.TestAdapterForGoogleTest", + "Microsoft.VisualStudio.VC.Ide.TestAdapterForGoogleTest", + "Microsoft.VisualStudio.Component.VC.TestAdapterForBoostTest", + "Microsoft.VisualStudio.VC.Ide.TestAdapterForBoostTest", + "Microsoft.VisualStudio.Component.VC.CMake.Project", + "Microsoft.VisualStudio.ComponentGroup.WebToolsExtensions.CMake", + "Microsoft.VisualStudio.VC.CMake", + "Microsoft.VisualStudio.VC.CMake.Project", + "Microsoft.VisualStudio.VC.CMake.Client", + "Microsoft.VisualStudio.VC.ExternalBuildFramework", + "Microsoft.VisualStudio.Component.VC.DiagnosticTools", + "Microsoft.VisualStudio.ComponentGroup.NativeDesktop.Core", + "Microsoft.VisualStudio.PackageGroup.TestTools.Native", + "Microsoft.VisualStudio.Component.VC.Redist.14.Latest", + "Microsoft.VisualStudio.VC.Templates.UnitTest", + "Microsoft.VisualStudio.VC.UnitTest.Desktop.Build.Core", + "Microsoft.VisualStudio.TestTools.TestPlatform.V1.CPP", + "Microsoft.VisualStudio.VC.Templates.UnitTest.Resources", + "Microsoft.VisualStudio.VC.Templates.Desktop", + "Microsoft.VisualStudio.Component.Graphics", + "Microsoft.VisualStudio.Graphics.Viewers", + "Microsoft.VisualStudio.Graphics.Viewers.Resources", + "Microsoft.VisualStudio.Component.VC.ATL.ARM64", + "Microsoft.VisualCpp.ATL.ARM64", + "Microsoft.VC.14.34.17.4.ATL.ARM64.base", + 
"Microsoft.VisualStudio.Component.VC.ATL", + "Microsoft.VisualStudio.VC.Ide.ATL", + "Microsoft.VisualStudio.VC.Ide.ATL.Resources", + "Microsoft.VisualCpp.ATL.X86", + "Microsoft.VC.14.34.17.4.ATL.X86.base", + "Microsoft.VisualCpp.ATL.X64", + "Microsoft.VC.14.34.17.4.ATL.X64.base", + "Microsoft.VC.14.34.17.4.Props.ATLMFC", + "Microsoft.VisualCpp.ATL.Source", + "Microsoft.VC.14.34.17.4.ATL.Source.base", + "Microsoft.VisualCpp.ATL.Headers", + "Microsoft.VC.14.34.17.4.ATL.Headers.base", + "Microsoft.VC.14.34.17.4.Servicing.ATL", + "Microsoft.VisualStudio.Component.VC.Tools.ARM64", + "Microsoft.VisualStudio.VC.MSBuild.v170.ARM64.v143", + "Microsoft.VisualStudio.VC.MSBuild.v170.ARM64", + "Microsoft.VS.VC.vcvars.arm64.Shortcuts", + "Microsoft.VisualCpp.CA.Ext.Hostx64.TargetARM64", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx64.TargetARM64.base", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx64.TargetARM64.Res.base", + "Microsoft.VisualCpp.CA.Ext.Hostx86.TargetARM64", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx86.TargetARM64.base", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx86.TargetARM64.Res.base", + "Microsoft.VisualCpp.CA.Ext.HostARM64.TargetARM64", + "Microsoft.VC.14.34.17.4.CA.Ext.HostARM64.TargetARM64.base", + "Microsoft.VC.14.34.17.4.CA.Ext.HostARM64.TargetARM64.Res.base", + "Microsoft.VisualCpp.Tools.Hostx86.Targetarm64", + "Microsoft.VC.14.34.17.4.Tools.Hostx86.Targetarm64.base", + "Microsoft.VC.14.34.17.4.Tools.HostX86.TargetARM64.Res.base", + "Microsoft.VisualCpp.Tools.HostARM64.TargetARM64", + "Microsoft.VC.14.34.17.4.Tools.HostARM64.TargetARM64.base", + "Microsoft.VC.14.34.17.4.Tools.HostARM64.TargetARM64.Res.base", + "Microsoft.VisualCpp.CRT.Redist.ARM64.OneCore.Desktop", + "Microsoft.VC.14.34.17.4.CRT.Redist.ARM64.OneCore.Desktop.base", + "Microsoft.VisualCpp.CRT.Redist.ARM64", + "Microsoft.VC.14.34.17.4.CRT.Redist.ARM64.base", + "Microsoft.VisualCpp.CRT.ARM64.OneCore.Desktop", + "Microsoft.VC.14.34.17.4.CRT.ARM64.OneCore.Desktop.base", + "Microsoft.VC.14.34.17.4.CRT.ARM64.OneCore.Desktop.debug.base", + "Microsoft.VisualCpp.CRT.ARM64.Store", + "Microsoft.VC.14.34.17.4.CRT.ARM64.Store.base", + "Microsoft.VisualCpp.CRT.ARM64.Desktop", + "Microsoft.VC.14.34.17.4.CRT.ARM64.Desktop.base", + "Microsoft.VC.14.34.17.4.CRT.ARM64.Desktop.debug.base", + "Microsoft.VisualStudio.PackageGroup.VC.Tools.x64.ARM64", + "Microsoft.VisualCpp.Tools.Core", + "Microsoft.VisualCpp.PGO.ARM64", + "Microsoft.VC.14.34.17.4.PGO.ARM64.base", + "Microsoft.VisualCpp.Premium.Tools.Hostx86.Targetarm64", + "Microsoft.VC.14.34.17.4.Premium.Tools.Hostx86.Targetarm64.base", + "Microsoft.VC.14.34.17.4.Prem.HostX86.TargetARM64.Res.base", + "Microsoft.VisualCpp.Premium.Tools.HostX64.TargetARM64", + "Microsoft.VC.14.34.17.4.Premium.Tools.HostX64.TargetARM64.base", + "Microsoft.VC.14.34.17.4.Prem.HostX64.TargetARM64.Res.base", + "Microsoft.VisualCpp.Premium.Tools.ARM64.Base", + "Microsoft.VC.14.34.17.4.Premium.Tools.ARM64.Base.base", + "Microsoft.VisualCpp.Tools.HostX64.TargetARM64", + "Microsoft.VC.14.34.17.4.Tools.HostX64.TargetARM64.base", + "Microsoft.VC.14.34.17.4.Props.ARM64", + "Microsoft.VC.14.34.17.4.Tools.HostX64.TargetARM64.Res.base", + "Microsoft.VisualStudio.Component.VC.Tools.ARM64EC", + "Microsoft.VisualStudio.Component.Windows11SDK.22621", + "Win11SDK_10.0.22621", + "Microsoft.VisualStudio.VC.MSBuild.v170.ARM64EC.v143", + "Microsoft.VisualStudio.VC.MSBuild.v170.ARM64EC", + "Microsoft.VisualCpp.CRT.ARM64EC.Store", + "Microsoft.VC.14.34.17.4.CRT.ARM64EC.Store.base", + "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", + 
"Microsoft.VisualCpp.CodeAnalysis.Extensions", + "Microsoft.VisualCpp.CA.Ext.HostARM64.Targetx64", + "Microsoft.VC.14.34.17.4.CA.Ext.HostARM64.Targetx64.base", + "Microsoft.VC.14.34.17.4.CA.Ext.HostARM64.Targetx64.Res.base", + "Microsoft.VisualCpp.CA.Ext.HostARM64.Targetx86", + "Microsoft.VC.14.34.17.4.CA.Ext.HostARM64.Targetx86.base", + "Microsoft.VC.14.34.17.4.CA.Ext.HostARM64.Targetx86.Res.base", + "Microsoft.VisualCpp.CA.Ext.Hostx86.Targetx64", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx86.Targetx64.base", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx86.Targetx64.Res.base", + "Microsoft.VisualCpp.CA.Ext.Hostx86.Targetx86", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx86.Targetx86.base", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx86.Targetx86.Res.base", + "Microsoft.VisualCpp.CA.Ext.Hostx64.Targetx64", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx64.Targetx64.base", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx64.Targetx64.Res.base", + "Microsoft.VisualCpp.CA.Ext.Hostx64.Targetx86", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx64.Targetx86.base", + "Microsoft.VC.14.34.17.4.Servicing.CAExtensions", + "Microsoft.VC.14.34.17.4.CA.Ext.Hostx64.Targetx86.Res.base", + "Microsoft.VisualCpp.Tools.HostX64.TargetX86", + "Microsoft.VC.14.34.17.4.Tools.HostX64.TargetX86.base", + "Microsoft.VC.14.34.17.4.Tools.HostX64.TargetX86.Res.base", + "Microsoft.VisualCpp.Tools.HostX64.TargetX64", + "Microsoft.VC.14.34.17.4.Tools.HostX64.TargetX64.base", + "Microsoft.VC.14.34.17.4.Tools.HostX64.TargetX64.Res.base", + "Microsoft.VisualCpp.Tools.HostARM64.TargetX86", + "Microsoft.VC.14.34.17.4.Tools.HostARM64.TargetX86.base", + "Microsoft.VisualCpp.RuntimeDebug.14", + "Microsoft.VC.14.34.17.4.Tools.HostARM64.TargetX86.Res.base", + "Microsoft.VisualCpp.Tools.HostARM64.TargetX64", + "Microsoft.VC.14.34.17.4.Tools.HostARM64.TargetX64.base", + "Microsoft.VisualCpp.RuntimeDebug.14.ARM64", + "Microsoft.VisualCpp.Redist.14.Latest", + "Microsoft.VisualCpp.Redist.14.Latest", + "Microsoft.VC.14.34.17.4.Tools.HostARM64.Targetx64.Res.base", + "Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX64", + "Microsoft.VC.14.34.17.4.Premium.Tools.HostX86.TargetX64.base", + "Microsoft.VC.14.34.17.4.Prem.Hostx86.Targetx64.Res.base", + "Microsoft.VisualCpp.Premium.Tools.HostX86.TargetX86", + "Microsoft.VC.14.34.17.4.Premium.Tools.HostX86.TargetX86.base", + "Microsoft.VC.14.34.17.4.Prem.HostX86.TargetX86.Res.base", + "Microsoft.VisualCpp.Premium.Tools.HostARM64.TargetX86", + "Microsoft.VC.14.34.17.4.Premium.Tools.HostARM64.TargetX86.base", + "Microsoft.VC.14.34.17.4.Prem.HostARM64.TargetX86.Res.base", + "Microsoft.VisualCpp.Premium.Tools.HostARM64.TargetX64", + "Microsoft.VC.14.34.17.4.Premium.Tools.HostARM64.TargetX64.base", + "Microsoft.VC.14.34.17.4.Prem.HostARM64.Targetx64.Res.base", + "Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX86", + "Microsoft.VC.14.34.17.4.Premium.Tools.HostX64.TargetX86.base", + "Microsoft.VC.14.34.17.4.Prem.HostX64.TargetX86.Res.base", + "Microsoft.VisualCpp.Premium.Tools.HostX64.TargetX64", + "Microsoft.VC.14.34.17.4.Premium.Tools.HostX64.TargetX64.base", + "Microsoft.VC.14.34.17.4.Prem.HostX64.TargetX64.Res.base", + "Microsoft.VisualCpp.PGO.X86", + "Microsoft.VC.14.34.17.4.PGO.X86.base", + "Microsoft.VisualCpp.PGO.X64", + "Microsoft.VC.14.34.17.4.PGO.X64.base", + "Microsoft.VisualCpp.PGO.Headers", + "Microsoft.VC.14.34.17.4.PGO.Headers.base", + "Microsoft.VisualCpp.CRT.x86.Store", + "Microsoft.VC.14.34.17.4.CRT.x86.Store.base", + "Microsoft.VisualCpp.CRT.x86.OneCore.Desktop", + "Microsoft.VC.14.34.17.4.CRT.x86.OneCore.Desktop.base", + 
"Microsoft.VisualCpp.CRT.x64.Store", + "Microsoft.VC.14.34.17.4.CRT.x64.Store.base", + "Microsoft.VisualCpp.CRT.x64.OneCore.Desktop", + "Microsoft.VC.14.34.17.4.CRT.x64.OneCore.Desktop.base", + "Microsoft.VisualCpp.CRT.Redist.x86.OneCore.Desktop", + "Microsoft.VC.14.34.17.4.CRT.Redist.x86.OneCore.Desktop.base", + "Microsoft.VisualCpp.CRT.Redist.x64.OneCore.Desktop", + "Microsoft.VC.14.34.17.4.CRT.Redist.x64.OneCore.Desktop.base", + "Microsoft.VisualStudio.PackageGroup.VC.Tools.x86", + "Microsoft.VisualCpp.Tools.Hostx86.Targetx64.Res", + "Microsoft.VisualCpp.Tools.HostX86.TargetX64", + "Microsoft.VC.14.34.17.4.Tools.HostX86.TargetX64.base", + "Microsoft.VC.14.34.17.4.Props.x64", + "Microsoft.VC.14.34.17.4.Tools.Hostx86.Targetx64.Res.base", + "Microsoft.VisualCpp.Tools.HostX86.TargetX86.Res", + "Microsoft.VisualCpp.Tools.HostX86.TargetX86", + "Microsoft.VC.14.34.17.4.Tools.HostX86.TargetX86.base", + "Microsoft.VC.14.34.17.4.Servicing.Compilers", + "Microsoft.VC.14.34.17.4.Props.x86", + "Microsoft.VC.14.34.17.4.Props", + "Microsoft.VC.14.34.17.4.Tools.HostX86.TargetX86.Res.base", + "Microsoft.VisualCpp.Tools.Core.Resources", + "Microsoft.VisualCpp.Tools.Core.x86", + "Microsoft.VC.14.34.17.4.Tools.Core.Props", + "Microsoft.VisualCpp.DIA.SDK", + "Microsoft.VisualCpp.Servicing.DIASDK", + "Microsoft.VisualCpp.CRT.x86.Desktop", + "Microsoft.VC.14.34.17.4.CRT.x86.Desktop.base", + "Microsoft.VisualCpp.CRT.x64.Desktop", + "Microsoft.VC.14.34.17.4.CRT.x64.Desktop.base", + "Microsoft.VisualCpp.CRT.Source", + "Microsoft.VC.14.34.17.4.CRT.Source.base", + "Microsoft.VisualCpp.CRT.Redist.X86", + "Microsoft.VC.14.34.17.4.CRT.Redist.X86.base", + "Microsoft.VisualCpp.CRT.Redist.X64", + "Microsoft.VisualCpp.CRT.Redist.Resources", + "Microsoft.VC.14.34.17.4.CRT.Redist.X64.base", + "Microsoft.VisualCpp.CRT.Headers", + "Microsoft.VC.14.34.17.4.CRT.Headers.base", + "Microsoft.VC.14.34.17.4.Servicing.CrtHeaders", + "Microsoft.VC.14.34.17.4.Servicing", + "Microsoft.VisualStudio.Component.VC.CoreIde", + "Microsoft.VisualStudio.VC.Ide.Pro", + "Microsoft.VisualStudio.VC.Ide.Pro.Resources", + "Microsoft.VisualStudio.VC.Templates.General", + "Microsoft.VisualStudio.VC.Templates.General.Resources", + "Microsoft.VisualStudio.VC.Items.Pro", + "Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Reduced", + "Microsoft.VisualStudio.VC.Ide.x64", + "Microsoft.VisualStudio.PackageGroup.VC.CoreIDE.Express", + "Microsoft.VisualStudio.VC.vcvars", + "Microsoft.VS.VC.vcvars.x86.Shortcuts", + "Microsoft.VS.VC.vcvars.x64.Shortcuts", + "Microsoft.VS.VC.vcvars.arm64_x64.Shortcuts", + "Microsoft.VisualStudio.VC.MSBuild.v170.X64.v143", + "Microsoft.VisualStudio.VC.MSBuild.v170.X64", + "Microsoft.VisualStudio.VC.MSBuild.v170.ARM.v143", + "Microsoft.VisualStudio.VC.MSBuild.v170.ARM", + "Microsoft.VisualStudio.VC.MSBuild.v170.x86.v143", + "Microsoft.VisualStudio.VC.MSBuild.v170.X86", + "Microsoft.VisualStudio.VC.MSBuild.v170.Base", + "Microsoft.VisualStudio.VC.MSBuild.v170.Base.Resources", + "Microsoft.VisualStudio.VC.Ide.WinXPlus", + "Microsoft.VisualStudio.VC.Ide.Dskx", + "Microsoft.VisualStudio.VC.Ide.Dskx.Resources", + "Microsoft.VisualStudio.VC.Ide.Base", + "Microsoft.VisualStudio.VC.Ide.LanguageService", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.Scripts", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.PythonDistro", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.10", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.9", + 
"Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.8", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.7", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.6", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.5", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.4", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.3", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.2", + "Microsoft.VisualStudio.VC.Ide.SecurityIssueAnalysis.3rdPartyLibs.1", + "Microsoft.VisualStudio.VC.Ide.VCPkgDatabase", + "Microsoft.VisualStudio.VC.Ide.Core", + "Microsoft.VisualStudio.VC.Ide.ProjectSystem", + "Microsoft.VisualStudio.VC.Ide.ProjectSystem.Resources", + "Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine", + "Microsoft.VisualStudio.VC.Ide.Core.VCProjectEngine.Resources", + "Microsoft.VisualStudio.VC.Ide.LanguageService.Resources", + "Microsoft.VisualStudio.VC.Llvm.Base", + "Microsoft.VisualStudio.VC.Ide.Base.Resources", + "Microsoft.Net.PackageGroup.4.8.1.Redist", + "Microsoft.VisualStudio.Component.IntelliCode", + "Microsoft.VisualStudio.IntelliCode.CSharp", + "Microsoft.VisualStudio.IntelliCode", + "Component.Microsoft.VisualStudio.LiveShare.2022", + "Microsoft.VisualStudio.Component.Debugger.JustInTime", + "Microsoft.VisualStudio.Debugger.ImmersiveActivateHelper.Msi", + "Microsoft.VisualStudio.Debugger.JustInTime", + "Microsoft.VisualStudio.Debugger.JustInTime.Msi", + "Microsoft.VisualStudio.LiveShare.2022", + "Microsoft.Icecap.Analysis", + "Microsoft.Icecap.Analysis.Resources", + "Microsoft.Icecap.Analysis.Resources.Targeted", + "Microsoft.Icecap.Collection.Msi", + "Microsoft.Icecap.Collection.Msi.Targeted", + "Microsoft.Icecap.Collection.Msi.Resources", + "Microsoft.Icecap.Collection.Msi.Resources.Targeted", + "Microsoft.DiagnosticsHub.Instrumentation", + "Microsoft.DiagnosticsHub.Instrumentation.Targeted", + "Microsoft.DiagnosticsHub.CpuSampling", + "Microsoft.DiagnosticsHub.CpuSampling.Targeted", + "Microsoft.PackageGroup.DiagnosticsHub.Platform", + "Microsoft.VisualStudio.InstrumentationEngine.ARM64", + "Microsoft.VisualStudio.InstrumentationEngine", + "Microsoft.DiagnosticsHub.Runtime.ExternalDependencies", + "SQLiteCore", + "SQLiteCore.Targeted", + "Microsoft.DiagnosticsHub.Runtime.ExternalDependencies.Targeted", + "Microsoft.DiagnosticsHub.Runtime", + "Microsoft.DiagnosticsHub.Runtime.Targeted", + "Microsoft.DiagnosticsHub.Collection.ExternalDependencies.arm64", + "Microsoft.DiagnosticsHub.Collection", + "Microsoft.DiagnosticsHub.Collection.Service", + "Microsoft.VisualStudio.VC.Ide.MDD", + "Microsoft.VisualStudio.VC.Ide.Linux.ConnectionManager", + "Microsoft.VisualStudio.VisualC.Utilities", + "Microsoft.VisualStudio.VisualC.Utilities.Resources", + "Microsoft.VisualStudio.VC.Ide.Linux.ConnectionManager.Resources", + "Microsoft.VisualStudio.VC.Ide.ResourceEditor", + "Microsoft.VisualStudio.VC.Ide.ResourceEditor.Resources", + "Microsoft.VisualStudio.PackageGroup.TestTools.Core", + "Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V2.CLI", + "Microsoft.VisualStudio.TestTools.TestPlatform.V2.CLI", + "Microsoft.VisualStudio.TestTools.Pex.Common", + "Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.V1.CLI", + "Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.Legacy", + "Microsoft.VisualStudio.PackageGroup.MinShell.Interop", + "Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Msi", + "Microsoft.VisualStudio.TestTools.TP.Legacy.Tips.Common", + 
"Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips", + "Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Tips.Resources", + "Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.TestSettings", + "Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Professional", + "Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Common", + "Microsoft.VisualStudio.TestTools.TP.Legacy.Common.Res", + "Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core", + "Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Core.Resources", + "Microsoft.VisualStudio.TestTools.TestPlatform.Legacy.Agent", + "Microsoft.VisualStudio.PackageGroup.TestTools.TestPlatform.IDE", + "Microsoft.VisualStudio.Cache.Service", + "Microsoft.VisualStudio.TestTools.TestWIExtension", + "Microsoft.VisualStudio.TestTools.TestPlatform.V1.CLI", + "Microsoft.VisualStudio.TestTools.TestPlatform.IDE", + "Microsoft.VisualStudio.PackageGroup.TestTools.CodeCoverage", + "Microsoft.VisualStudio.PackageGroup.TestTools.DataCollectors", + "Microsoft.VisualStudio.Component.NuGet", + "Microsoft.CredentialProvider", + "Microsoft.VisualStudio.NuGet.Licenses", + "Microsoft.VisualStudio.Component.TextTemplating", + "Microsoft.VisualStudio.TextTemplating.MSBuild", + "Microsoft.VisualStudio.TextTemplating.Integration", + "Microsoft.VisualStudio.TextTemplating.Core", + "Microsoft.VisualStudio.TextTemplating.Integration.Resources", + "Microsoft.VisualCpp.CRT.ClickOnce.Msi", + "Microsoft.VisualStudio.Component.Roslyn.LanguageServices", + "Microsoft.VisualStudio.InteractiveWindow", + "Microsoft.DiaSymReader.Native", + "Microsoft.VisualCpp.Redist.14", + "Microsoft.VisualCpp.Redist.14", + "Microsoft.VisualCpp.Servicing.Redist", + "Microsoft.VisualStudio.PackageGroup.StaticAnalysis", + "Microsoft.VisualStudio.StaticAnalysis.IDE", + "Microsoft.VisualStudio.StaticAnalysis.IDE.Resources", + "Microsoft.VisualStudio.StaticAnalysis.FxCop.Resources", + "Microsoft.VisualStudio.StaticAnalysis.auxil", + "Microsoft.VisualStudio.StaticAnalysis.auxil.Resources", + "Roslyn.VisualStudio.Setup.ServiceHub", + "Microsoft.Component.MSBuild", + "Microsoft.NuGet.Build.Tasks.Setup", + "Microsoft.VisualStudio.Component.Roslyn.Compiler", + "Microsoft.CodeAnalysis.Compilers", + "Microsoft.VisualStudio.Component.JavaScript.TypeScript", + "Microsoft.VisualStudio.JavaScript.ProjectSystem", + "Microsoft.VisualStudio.ComponentGroup.WebToolsExtensions", + "Microsoft.VisualStudio.ProTools", + "sqlsysclrtypes", + "SQLCommon", + "Microsoft.VisualStudio.ProTools.Resources", + "Microsoft.VisualStudio.Web.Scaffolding", + "Microsoft.VisualStudio.WebToolsExtensions", + "Microsoft.VisualStudio.ConnectedServices.Core", + "Microsoft.VisualStudio.WebTools", + "Microsoft.VisualStudio.WebToolsExtensions.MSBuild", + "Microsoft.VisualStudio.WebTools.Resources", + "Microsoft.VisualStudio.WebTools.WSP.FSA", + "Microsoft.VisualStudio.WebTools.WSP.FSA.Resources", + "Microsoft.VisualStudio.PackageGroup.Debugger.Script", + "Microsoft.VisualStudio.Component.TypeScript.TSServer", + "Microsoft.VisualStudio.Package.TypeScript.TSServer", + "Microsoft.VisualStudio.PackageGroup.JavaScript.Language", + "Microsoft.VisualStudio.Package.NodeJs", + "TypeScript.Build", + "TypeScript.LanguageService", + "TypeScript.Tools", + "Microsoft.VisualStudio.PackageGroup.Community", + "Microsoft.VisualStudio.Community.VB.x86", + "Microsoft.VisualStudio.Community.VB.x64", + "Microsoft.VisualStudio.PackageGroup.Core", + "Microsoft.VisualStudio.CodeSense.Community", + "Microsoft.VisualStudio.TestTools.TeamFoundationClient", + 
"Microsoft.VisualStudio.PackageGroup.Debugger.Core", + "Microsoft.VisualStudio.Debugger.BrokeredServices", + "Microsoft.VisualStudio.Debugger.VSCodeDebuggerHost", + "Microsoft.VisualStudio.Debugger.AzureAttach", + "Microsoft.VisualStudio.Web.Azure.Common", + "Microsoft.WebTools.Shared", + "Microsoft.WebTools.DotNet.Core.ItemTemplates", + "Microsoft.VisualStudio.PackageGroup.Debugger.TimeTravel.Replay", + "Microsoft.VisualStudio.VC.Ide.Debugger", + "Microsoft.VisualStudio.VC.Ide.Debugger.Concord", + "Microsoft.VisualStudio.VC.Ide.Debugger.Concord.Resources", + "Microsoft.VisualStudio.VC.Ide.Debugger.Resources", + "Microsoft.VisualStudio.VC.Ide.Common", + "Microsoft.VisualStudio.VC.Ide.Common.Resources", + "Microsoft.VisualStudio.Debugger.CollectionAgents", + "Microsoft.VisualStudio.Debugger.Parallel", + "Microsoft.VisualStudio.Debugger.Parallel.Resources", + "Microsoft.VisualStudio.Debugger.Managed", + "Microsoft.CodeAnalysis.ExpressionEvaluator", + "Microsoft.CodeAnalysis.VisualStudio.Setup", + "Microsoft.VisualStudio.Debugger.Concord.Managed", + "Microsoft.VisualStudio.Debugger.Concord.Managed.Resources", + "Microsoft.VisualStudio.Debugger.Managed.Resources", + "Microsoft.VisualStudio.Debugger.TargetComposition", + "Microsoft.VisualStudio.Debugger.TargetComposition.Remote.arm64", + "Microsoft.VisualStudio.Debugger.TargetComposition.Remote", + "Microsoft.VisualStudio.Debugger.TargetComposition.Remote", + "Microsoft.VisualStudio.Debugger.Remote", + "Microsoft.VisualStudio.Debugger.Concord.Remote", + "Microsoft.VisualStudio.Debugger.Concord.Remote.Resources", + "Microsoft.VisualStudio.Debugger.Remote", + "Microsoft.VisualStudio.Debugger.Remote.ARM64", + "Microsoft.VisualStudio.Debugger.Concord.Remote.ARM64", + "Microsoft.VisualStudio.Debugger.Concord.Remote.Resources.ARM64", + "Microsoft.VisualStudio.Debugger.Remote.ARM", + "Microsoft.VisualStudio.Debugger.Concord.Remote.ARM", + "Microsoft.VisualStudio.Debugger.Concord.Remote.Resources.ARM", + "Microsoft.VisualStudio.Debugger.Remote.Resources.ARM", + "Microsoft.VisualStudio.Debugger.Remote.Resources.ARM64", + "Microsoft.VisualStudio.Debugger.Concord.Remote", + "Microsoft.VisualStudio.Debugger.Concord.Remote.Resources", + "Microsoft.VisualStudio.Debugger.Remote.Resources", + "Microsoft.VisualStudio.Debugger.Remote.Resources", + "Microsoft.VisualStudio.Debugger", + "Microsoft.VisualStudio.VC.MSVCDis", + "Microsoft.IntelliTrace.DiagnosticsHub", + "Microsoft.VisualStudio.Debugger.Concord", + "Microsoft.VisualStudio.Debugger.Concord.Resources", + "Microsoft.VisualStudio.Debugger.Resources", + "Microsoft.VisualStudio.Debugger.Package.DiagHub.Client", + "Microsoft.VisualStudio.Debugger.Remote.DiagnosticsHub.Client", + "Microsoft.VisualStudio.Debugger.Remote.DiagnosticsHub.Client", + "Microsoft.VisualStudio.Debugger.Remote.DiagnosticsHub.Client", + "Microsoft.PackageGroup.ClientDiagnostics", + "Microsoft.VisualStudio.AppResponsiveness", + "Microsoft.VisualStudio.AppResponsiveness.Targeted", + "Microsoft.VisualStudio.AppResponsiveness.Resources", + "Microsoft.VisualStudio.ClientDiagnostics", + "Microsoft.VisualStudio.ClientDiagnostics.Targeted", + "Microsoft.VisualStudio.ClientDiagnostics.Resources", + "Microsoft.VisualStudio.PackageGroup.CommunityCore", + "Microsoft.VisualStudio.ProjectSystem.Full", + "Microsoft.VisualStudio.LiveShareApi", + "Microsoft.VisualStudio.ProjectSystem.Query", + "Microsoft.VisualStudio.ProjectSystem", + "Microsoft.VisualStudio.Community.x86", + "Microsoft.VisualStudio.Community.x64", + 
"Microsoft.VisualStudio.Community.Msi.Resources", + "Microsoft.VisualStudio.Community.Msi", + "Microsoft.VisualStudio.Community.Shared.Msi", + "Microsoft.VisualStudio.Devenv.Msi", + "Microsoft.VisualStudio.Devenv.Shared.Msi", + "Microsoft.VisualStudio.MinShell.Interop.Msi", + "Microsoft.VisualStudio.MinShell.Interop.Shared.Msi", + "Microsoft.VisualStudio.Editors", + "Microsoft.VisualStudio.Workload.CoreEditor", + "Microsoft.VisualStudio.Component.CoreEditor", + "Microsoft.VisualStudio.PackageGroup.CoreEditor", + "Microsoft.WebView2", + "Microsoft.VisualStudio.ScriptedHost", + "Microsoft.VisualStudio.ScriptedHost.Targeted", + "Microsoft.VisualCpp.Tools.Common.UtilsPrereq", + "Microsoft.VisualCpp.Tools.Common.Utils", + "Microsoft.VisualCpp.Tools.Common.Utils.Resources", + "Microsoft.VisualStudio.PackageGroup.VsDevCmd", + "Microsoft.VisualStudio.VsDevCmd.Ext.NetFxSdk", + "Microsoft.VisualStudio.VsDevCmd.Core.WinSdk", + "Microsoft.VisualStudio.VsDevCmd.Core.DotNet", + "Microsoft.VisualStudio.VC.DevCmd", + "Microsoft.VisualStudio.VC.DevCmd.Resources", + "Microsoft.VisualStudio.VirtualTree", + "Microsoft.DiaSymReader", + "Microsoft.Build.Dependencies", + "Microsoft.Build.FileTracker.Msi", + "Microsoft.Build", + "Microsoft.VisualStudio.PackageGroup.NuGet", + "Microsoft.DataAI.NuGetRecommender", + "Microsoft.VisualStudio.NuGet.Core", + "Microsoft.Build.Arm64", + "Microsoft.Build.UnGAC", + "Microsoft.VisualStudio.TextMateGrammars", + "Microsoft.VisualStudio.Platform.Markdown", + "Microsoft.VisualStudio.Platform.CrossRepositorySearch", + "Microsoft.VisualStudio.PackageGroup.TeamExplorer.Common", + "Microsoft.VisualStudio.TeamExplorer", + "Microsoft.VisualStudio.PackageGroup.ServiceHub", + "Microsoft.ServiceHub.Node", + "Microsoft.ServiceHub.Managed", + "Microsoft.ServiceHub.arm64", + "Microsoft.VisualStudio.ProjectServices", + "Microsoft.VisualStudio.OpenFolder.VSIX", + "Microsoft.VisualStudio.FileHandler.Msi", + "Microsoft.VisualStudio.FileHandler.Msi", + "Microsoft.VisualStudio.PackageGroup.MinShell", + "Microsoft.VisualStudio.MinShell.Msi", + "Microsoft.VisualStudio.MinShell.Shared.Msi", + "Microsoft.VisualStudio.MinShell.Msi.Resources", + "Microsoft.VisualStudio.MinShell.Interop", + "CoreEditorFonts", + "Microsoft.VisualStudio.Log", + "Microsoft.VisualStudio.Log.Targeted", + "Microsoft.VisualStudio.Log.Resources", + "Microsoft.VisualStudio.Finalizer", + "Microsoft.VisualStudio.Devenv", + "Microsoft.VisualStudio.Devenv.Resources", + "Microsoft.VisualStudio.CoreEditor", + "Microsoft.VisualStudio.Navigation.RichCodeNav", + "Microsoft.VisualStudio.Platform.NavigateTo", + "Microsoft.VisualStudio.Connected", + "SQLitePCLRaw", + "SQLitePCLRaw.Targeted", + "Microsoft.VisualStudio.Connected.Auto", + "Microsoft.VisualStudio.Connected.Auto.Resources", + "Microsoft.VisualStudio.AzureSDK", + "Microsoft.VisualStudio.PerfLib", + "Microsoft.VisualStudio.Connected.Resources", + "Microsoft.Net.PackageGroup.4.8.Redist", + "Microsoft.VisualStudio.PackageGroup.Progression", + "Microsoft.VisualStudio.PerformanceProvider", + "Microsoft.VisualStudio.GraphModel", + "Microsoft.VisualStudio.GraphProvider", + "Microsoft.VisualStudio.Community.VB.Targeted", + "Microsoft.VisualStudio.Community.VB.Neutral", + "Microsoft.VisualStudio.Community.CSharp.Targeted", + "Microsoft.VisualStudio.Community.CSharp.Neutral", + "Microsoft.VisualStudio.Community.ProductArch.TargetedExtra", + "Microsoft.VisualStudio.Community.ProductArch.Targeted", + "Microsoft.VisualStudio.Community.ProductArch.NeutralExtra", + 
"Microsoft.DiaSymReader.PortablePdb", + "Microsoft.IntelliTrace.CollectorCab", + "Microsoft.VisualStudio.Community.VB.Resources.Targeted", + "Microsoft.VisualStudio.Community.VB.Resources.Neutral", + "Microsoft.VisualStudio.Community.CSharp.Resources.Targeted", + "Microsoft.VisualStudio.Community.CSharp.Resources.Neutral", + "Microsoft.VisualStudio.Community.ProductArch.Resources.Targeted", + "Microsoft.VisualStudio.Community.ProductArch.Resources.NeutralExtra", + "Microsoft.VisualStudio.Net.Eula.Resources", + "Microsoft.VisualStudio.Community.ProductArch.Resources.Neutral", + "Microsoft.VisualStudio.WebSiteProject.DTE", + "Microsoft.VisualStudio.Diagnostics.AspNetHelper", + "Microsoft.VisualStudio.Diagnostics.AspNetHelper.Standard", + "Microsoft.MSHtml", + "Microsoft.VisualStudio.Platform.CallHierarchy", + "Microsoft.VisualStudio.Community.ProductArch.Neutral", + "Microsoft.VisualStudio.MinShell", + "Microsoft.VisualStudio.VsWebProtocolSelector.Msi", + "Microsoft.Net.6.WindowsDesktop.Runtime", + "Microsoft.Net.6.Runtime", + "Microsoft.VisualStudio.PackageGroup.Setup.Common", + "Microsoft.VisualStudio.Setup.WMIProvider", + "Microsoft.VisualStudio.Setup.Configuration.Interop", + "Microsoft.VisualStudio.Setup.Configuration", + "Microsoft.VisualStudio.Extensibility.Container", + "Microsoft.VisualStudio.LanguageServer", + "Microsoft.VisualStudio.Platform.Terminal", + "Microsoft.VisualStudio.MefHosting", + "Microsoft.VisualStudio.Initializer", + "Microsoft.VisualStudio.ExtensionManager", + "Microsoft.VisualStudio.Platform.Editor", + "Microsoft.VisualStudio.MinShell.Targeted", + "Microsoft.VisualStudio.NativeImageSupport", + "Microsoft.VisualStudio.Devenv.Config", + "Microsoft.VisualStudio.MinShell.Resources.arm64", + "Microsoft.VisualStudio.MinShell.Auto", + "Microsoft.VisualStudio.MinShell.Auto.Resources", + "Microsoft.VisualStudio.Branding.Community" + ] + } +] diff --git a/node_modules/npm/node_modules/node-gyp/test/reporter.js b/node_modules/npm/node_modules/node-gyp/test/reporter.js new file mode 100644 index 0000000..9964b1b --- /dev/null +++ b/node_modules/npm/node_modules/node-gyp/test/reporter.js @@ -0,0 +1,75 @@ +const Mocha = require('mocha') + +class Reporter { + constructor (runner) { + this.failedTests = [] + + runner.on(Mocha.Runner.constants.EVENT_RUN_BEGIN, () => { + console.log('Starting tests') + }) + + runner.on(Mocha.Runner.constants.EVENT_RUN_END, () => { + console.log('Tests finished') + console.log() + console.log('****************') + console.log('* TESTS REPORT *') + console.log('****************') + console.log() + console.log(`Executed ${runner.stats.suites} suites with ${runner.stats.tests} tests in ${runner.stats.duration} ms`) + console.log(` Passed: ${runner.stats.passes}`) + console.log(` Skipped: ${runner.stats.pending}`) + console.log(` Failed: ${runner.stats.failures}`) + if (this.failedTests.length > 0) { + console.log() + console.log(' Failed test details') + this.failedTests.forEach((failedTest, index) => { + console.log() + console.log(` ${index + 1}.'${failedTest.test.fullTitle()}'`) + console.log(` Name: ${failedTest.error.name}`) + console.log(` Message: ${failedTest.error.message}`) + console.log(` Code: ${failedTest.error.code}`) + console.log(` Stack: ${failedTest.error.stack}`) + }) + } + console.log() + }) + + runner.on(Mocha.Runner.constants.EVENT_SUITE_BEGIN, (suite) => { + if (suite.root) { + return + } + console.log(`Starting suite '${suite.title}'`) + }) + + runner.on(Mocha.Runner.constants.EVENT_SUITE_END, (suite) => { + if (suite.root) { + 
return + } + console.log(`Suite '${suite.title}' finished`) + console.log() + }) + + runner.on(Mocha.Runner.constants.EVENT_TEST_BEGIN, (test) => { + console.log(`Starting test '${test.title}'`) + }) + + runner.on(Mocha.Runner.constants.EVENT_TEST_PASS, (test) => { + console.log(`Test '${test.title}' passed in ${test.duration} ms`) + }) + + runner.on(Mocha.Runner.constants.EVENT_TEST_PENDING, (test) => { + console.log(`Test '${test.title}' skipped in ${test.duration} ms`) + }) + + runner.on(Mocha.Runner.constants.EVENT_TEST_FAIL, (test, error) => { + this.failedTests.push({ test, error }) + console.log(`Test '${test.title}' failed in ${test.duration} ms with ${error}`) + }) + + runner.on(Mocha.Runner.constants.EVENT_TEST_END, (test) => { + console.log() + }) + } +} + +module.exports = Reporter diff --git a/node_modules/npm/node_modules/node-gyp/test/test-addon.js b/node_modules/npm/node_modules/node-gyp/test/test-addon.js index f79eff7..4355662 100644 --- a/node_modules/npm/node_modules/node-gyp/test/test-addon.js +++ b/node_modules/npm/node_modules/node-gyp/test/test-addon.js @@ -1,6 +1,7 @@ 'use strict' -const test = require('tap').test +const { describe, it } = require('mocha') +const assert = require('assert') const path = require('path') const fs = require('graceful-fs') const childProcess = require('child_process') @@ -35,116 +36,117 @@ function checkCharmapValid () { return lines.pop() === 'True' } -test('build simple addon', function (t) { - t.plan(3) - - // Set the loglevel otherwise the output disappears when run via 'npm test' - var cmd = [nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose'] - var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { - var logLines = stderr.toString().trim().split(/\r?\n/) - var lastLine = logLines[logLines.length - 1] - t.strictEqual(err, null) - t.strictEqual(lastLine, 'gyp info ok', 'should end in ok') - t.strictEqual(runHello().trim(), 'world') +describe('addon', function () { + this.timeout(300000) + + it('build simple addon', function (done) { + // Set the loglevel otherwise the output disappears when run via 'npm test' + var cmd = [nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose'] + var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { + var logLines = stderr.toString().trim().split(/\r?\n/) + var lastLine = logLines[logLines.length - 1] + assert.strictEqual(err, null) + assert.strictEqual(lastLine, 'gyp info ok', 'should end in ok') + assert.strictEqual(runHello().trim(), 'world') + done() + }) + proc.stdout.setEncoding('utf-8') + proc.stderr.setEncoding('utf-8') }) - proc.stdout.setEncoding('utf-8') - proc.stderr.setEncoding('utf-8') -}) - -test('build simple addon in path with non-ascii characters', function (t) { - t.plan(1) - if (!checkCharmapValid()) { - return t.skip('python console app can\'t encode non-ascii character.') - } + it('build simple addon in path with non-ascii characters', function (done) { + if (!checkCharmapValid()) { + return this.skip('python console app can\'t encode non-ascii character.') + } - var testDirNames = { - cp936: '文件夹', - cp1252: 'Latīna', - cp932: 'フォルダ' - } - // Select non-ascii characters by current encoding - var testDirName = testDirNames[getEncoding()] - // If encoding is UTF-8 or other then no need to test - if (!testDirName) { - return t.skip('no need to test') - } + var testDirNames = { + cp936: '文件夹', + cp1252: 'Latīna', + cp932: 'フォルダ' + } + // Select non-ascii characters by current encoding + var testDirName = 
testDirNames[getEncoding()] + // If the encoding is UTF-8 or another unlisted encoding, there is no need to test + if (!testDirName) { + return this.skip('no need to test') + } - t.plan(3) + this.timeout(300000) - var data - var configPath = path.join(addonPath, 'build', 'config.gypi') - try { - data = fs.readFileSync(configPath, 'utf8') - } catch (err) { - t.error(err) - return - } - var config = JSON.parse(data.replace(/#.+\n/, '')) - var nodeDir = config.variables.nodedir - var testNodeDir = path.join(addonPath, testDirName) - // Create symbol link to path with non-ascii characters - try { - fs.symlinkSync(nodeDir, testNodeDir, 'dir') - } catch (err) { - switch (err.code) { - case 'EEXIST': break - case 'EPERM': - t.error(err, 'Please try to running console as an administrator') - return - default: - t.error(err) - return + var data + var configPath = path.join(addonPath, 'build', 'config.gypi') + try { + data = fs.readFileSync(configPath, 'utf8') + } catch (err) { + assert.fail(err) + return } - } - - var cmd = [ - nodeGyp, - 'rebuild', - '-C', - addonPath, - '--loglevel=verbose', - '-nodedir=' + testNodeDir - ] - var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { + var config = JSON.parse(data.replace(/#.+\n/, '')) + var nodeDir = config.variables.nodedir + var testNodeDir = path.join(addonPath, testDirName) + // Create a symbolic link to a path with non-ascii characters try { - fs.unlink(testNodeDir) + fs.symlinkSync(nodeDir, testNodeDir, 'dir') } catch (err) { - t.error(err) + switch (err.code) { + case 'EEXIST': break + case 'EPERM': + assert.fail(err, null, 'Please try running the console as an administrator') + return + default: + assert.fail(err) + return + } } - var logLines = stderr.toString().trim().split(/\r?\n/) - var lastLine = logLines[logLines.length - 1] - t.strictEqual(err, null) - t.strictEqual(lastLine, 'gyp info ok', 'should end in ok') - t.strictEqual(runHello().trim(), 'world') + var cmd = [ + nodeGyp, + 'rebuild', + '-C', + addonPath, + '--loglevel=verbose', + '-nodedir=' + testNodeDir + ] + var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { + try { + fs.unlinkSync(testNodeDir) // sync variant: bare fs.unlink throws without a callback + } catch (err) { + assert.fail(err) + } + + var logLines = stderr.toString().trim().split(/\r?\n/) + var lastLine = logLines[logLines.length - 1] + assert.strictEqual(err, null) + assert.strictEqual(lastLine, 'gyp info ok', 'should end in ok') + assert.strictEqual(runHello().trim(), 'world') + done() + }) + proc.stdout.setEncoding('utf-8') + proc.stderr.setEncoding('utf-8') }) - proc.stdout.setEncoding('utf-8') - proc.stderr.setEncoding('utf-8') -}) - -test('addon works with renamed host executable', function (t) { - // No `fs.copyFileSync` before node8.
+ if (process.version.substr(1).split('.')[0] < 8) { + return this.skip('skipping test for old node version') + } - var cmd = [nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose'] - var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { - var logLines = stderr.toString().trim().split(/\r?\n/) - var lastLine = logLines[logLines.length - 1] - t.strictEqual(err, null) - t.strictEqual(lastLine, 'gyp info ok', 'should end in ok') - t.strictEqual(runHello(notNodePath).trim(), 'world') - fs.unlinkSync(notNodePath) + this.timeout(300000) + + var notNodePath = path.join(os.tmpdir(), 'notnode' + path.extname(process.execPath)) + fs.copyFileSync(process.execPath, notNodePath) + + var cmd = [nodeGyp, 'rebuild', '-C', addonPath, '--loglevel=verbose'] + var proc = execFile(process.execPath, cmd, function (err, stdout, stderr) { + var logLines = stderr.toString().trim().split(/\r?\n/) + var lastLine = logLines[logLines.length - 1] + assert.strictEqual(err, null) + assert.strictEqual(lastLine, 'gyp info ok', 'should end in ok') + assert.strictEqual(runHello(notNodePath).trim(), 'world') + fs.unlinkSync(notNodePath) + done() + }) + proc.stdout.setEncoding('utf-8') + proc.stderr.setEncoding('utf-8') }) - proc.stdout.setEncoding('utf-8') - proc.stderr.setEncoding('utf-8') }) diff --git a/node_modules/npm/node_modules/node-gyp/test/test-configure-python.js b/node_modules/npm/node_modules/node-gyp/test/test-configure-python.js index aacd75f..ab1e551 100644 --- a/node_modules/npm/node_modules/node-gyp/test/test-configure-python.js +++ b/node_modules/npm/node_modules/node-gyp/test/test-configure-python.js @@ -1,6 +1,7 @@ 'use strict' -const test = require('tap').test +const { describe, it } = require('mocha') +const assert = require('assert') const path = require('path') const devDir = require('./common').devDir() const gyp = require('../lib/node-gyp') @@ -22,63 +23,59 @@ const configure = requireInject('../lib/configure', { const EXPECTED_PYPATH = path.join(__dirname, '..', 'gyp', 'pylib') const SEPARATOR = process.platform === 'win32' ? 
';' : ':' -const SPAWN_RESULT = { on: function () { } } +const SPAWN_RESULT = cb => ({ on: function () { cb() } }) require('npmlog').level = 'warn' -test('configure PYTHONPATH with no existing env', function (t) { - t.plan(1) - - delete process.env.PYTHONPATH - - var prog = gyp() - prog.parseArgv([]) - prog.spawn = function () { - t.equal(process.env.PYTHONPATH, EXPECTED_PYPATH) - return SPAWN_RESULT - } - prog.devDir = devDir - configure(prog, [], t.fail) -}) - -test('configure PYTHONPATH with existing env of one dir', function (t) { - t.plan(2) - - var existingPath = path.join('a', 'b') - process.env.PYTHONPATH = existingPath - - var prog = gyp() - prog.parseArgv([]) - prog.spawn = function () { - t.equal(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR)) - - var dirs = process.env.PYTHONPATH.split(SEPARATOR) - t.deepEqual(dirs, [EXPECTED_PYPATH, existingPath]) - - return SPAWN_RESULT - } - prog.devDir = devDir - configure(prog, [], t.fail) -}) - -test('configure PYTHONPATH with existing env of multiple dirs', function (t) { - t.plan(2) - - var pythonDir1 = path.join('a', 'b') - var pythonDir2 = path.join('b', 'c') - var existingPath = [pythonDir1, pythonDir2].join(SEPARATOR) - process.env.PYTHONPATH = existingPath - - var prog = gyp() - prog.parseArgv([]) - prog.spawn = function () { - t.equal(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR)) - - var dirs = process.env.PYTHONPATH.split(SEPARATOR) - t.deepEqual(dirs, [EXPECTED_PYPATH, pythonDir1, pythonDir2]) - - return SPAWN_RESULT - } - prog.devDir = devDir - configure(prog, [], t.fail) +describe('configure-python', function () { + it('configure PYTHONPATH with no existing env', function (done) { + delete process.env.PYTHONPATH + + var prog = gyp() + prog.parseArgv([]) + prog.spawn = function () { + assert.strictEqual(process.env.PYTHONPATH, EXPECTED_PYPATH) + return SPAWN_RESULT(done) + } + prog.devDir = devDir + configure(prog, [], assert.fail) + }) + + it('configure PYTHONPATH with existing env of one dir', function (done) { + var existingPath = path.join('a', 'b') + process.env.PYTHONPATH = existingPath + + var prog = gyp() + prog.parseArgv([]) + prog.spawn = function () { + assert.strictEqual(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR)) + + var dirs = process.env.PYTHONPATH.split(SEPARATOR) + assert.deepStrictEqual(dirs, [EXPECTED_PYPATH, existingPath]) + + return SPAWN_RESULT(done) + } + prog.devDir = devDir + configure(prog, [], assert.fail) + }) + + it('configure PYTHONPATH with existing env of multiple dirs', function (done) { + var pythonDir1 = path.join('a', 'b') + var pythonDir2 = path.join('b', 'c') + var existingPath = [pythonDir1, pythonDir2].join(SEPARATOR) + process.env.PYTHONPATH = existingPath + + var prog = gyp() + prog.parseArgv([]) + prog.spawn = function () { + assert.strictEqual(process.env.PYTHONPATH, [EXPECTED_PYPATH, existingPath].join(SEPARATOR)) + + var dirs = process.env.PYTHONPATH.split(SEPARATOR) + assert.deepStrictEqual(dirs, [EXPECTED_PYPATH, pythonDir1, pythonDir2]) + + return SPAWN_RESULT(done) + } + prog.devDir = devDir + configure(prog, [], assert.fail) + }) }) diff --git a/node_modules/npm/node_modules/node-gyp/test/test-create-config-gypi.js b/node_modules/npm/node_modules/node-gyp/test/test-create-config-gypi.js index eeac73f..725819b 100644 --- a/node_modules/npm/node_modules/node-gyp/test/test-create-config-gypi.js +++ b/node_modules/npm/node_modules/node-gyp/test/test-create-config-gypi.js @@ -1,70 +1,61 @@ 'use strict' 
const path = require('path') -const { test } = require('tap') +const { describe, it } = require('mocha') +const assert = require('assert') const gyp = require('../lib/node-gyp') const createConfigGypi = require('../lib/create-config-gypi') const { parseConfigGypi, getCurrentConfigGypi } = createConfigGypi.test -test('config.gypi with no options', async function (t) { - t.plan(2) +describe('create-config-gypi', function () { + it('config.gypi with no options', async function () { + const prog = gyp() + prog.parseArgv([]) - const prog = gyp() - prog.parseArgv([]) + const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) + assert.strictEqual(config.target_defaults.default_configuration, 'Release') + assert.strictEqual(config.variables.target_arch, process.arch) + }) - const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) - t.equal(config.target_defaults.default_configuration, 'Release') - t.equal(config.variables.target_arch, process.arch) -}) - -test('config.gypi with --debug', async function (t) { - t.plan(1) - - const prog = gyp() - prog.parseArgv(['_', '_', '--debug']) + it('config.gypi with --debug', async function () { + const prog = gyp() + prog.parseArgv(['_', '_', '--debug']) - const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) - t.equal(config.target_defaults.default_configuration, 'Debug') -}) + const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) + assert.strictEqual(config.target_defaults.default_configuration, 'Debug') + }) -test('config.gypi with custom options', async function (t) { - t.plan(1) + it('config.gypi with custom options', async function () { + const prog = gyp() + prog.parseArgv(['_', '_', '--shared-libxml2']) - const prog = gyp() - prog.parseArgv(['_', '_', '--shared-libxml2']) + const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) + assert.strictEqual(config.variables.shared_libxml2, true) + }) - const config = await getCurrentConfigGypi({ gyp: prog, vsInfo: {} }) - t.equal(config.variables.shared_libxml2, true) -}) + it('config.gypi with nodedir', async function () { + const nodeDir = path.join(__dirname, 'fixtures', 'nodedir') -test('config.gypi with nodedir', async function (t) { - t.plan(1) + const prog = gyp() + prog.parseArgv(['_', '_', `--nodedir=${nodeDir}`]) - const nodeDir = path.join(__dirname, 'fixtures', 'nodedir') + const config = await getCurrentConfigGypi({ gyp: prog, nodeDir, vsInfo: {} }) + assert.strictEqual(config.variables.build_with_electron, true) + }) - const prog = gyp() - prog.parseArgv(['_', '_', `--nodedir=${nodeDir}`]) + it('config.gypi with --force-process-config', async function () { + const nodeDir = path.join(__dirname, 'fixtures', 'nodedir') - const config = await getCurrentConfigGypi({ gyp: prog, nodeDir, vsInfo: {} }) - t.equal(config.variables.build_with_electron, true) -}) - -test('config.gypi with --force-process-config', async function (t) { - t.plan(1) - - const nodeDir = path.join(__dirname, 'fixtures', 'nodedir') - - const prog = gyp() - prog.parseArgv(['_', '_', '--force-process-config', `--nodedir=${nodeDir}`]) - - const config = await getCurrentConfigGypi({ gyp: prog, nodeDir, vsInfo: {} }) - t.equal(config.variables.build_with_electron, undefined) -}) + const prog = gyp() + prog.parseArgv(['_', '_', '--force-process-config', `--nodedir=${nodeDir}`]) -test('config.gypi parsing', function (t) { - t.plan(1) + const config = await getCurrentConfigGypi({ gyp: prog, nodeDir, vsInfo: {} }) + assert.strictEqual(config.variables.build_with_electron, 
undefined) + }) - const str = "# Some comments\n{'variables': {'multiline': 'A'\n'B'}}" - const config = parseConfigGypi(str) - t.deepEqual(config, { variables: { multiline: 'AB' } }) + it('config.gypi parsing', function () { + const str = "# Some comments\n{'variables': {'multiline': 'A'\n'B'}}" + const config = parseConfigGypi(str) + assert.deepStrictEqual(config, { variables: { multiline: 'AB' } }) + }) }) diff --git a/node_modules/npm/node_modules/node-gyp/test/test-download.js b/node_modules/npm/node_modules/node-gyp/test/test-download.js index c4caad9..1dd5a51 100644 --- a/node_modules/npm/node_modules/node-gyp/test/test-download.js +++ b/node_modules/npm/node_modules/node-gyp/test/test-download.js @@ -1,6 +1,7 @@ 'use strict' -const { test } = require('tap') +const { describe, it, after } = require('mocha') +const assert = require('assert') const fs = require('fs') const path = require('path') const util = require('util') @@ -16,202 +17,194 @@ const certs = require('./fixtures/certs') log.level = 'warn' -test('download over http', async (t) => { - t.plan(2) - - const server = http.createServer((req, res) => { - t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) - res.end('ok') - }) - - t.tearDown(() => new Promise((resolve) => server.close(resolve))) - - const host = 'localhost' - await new Promise((resolve) => server.listen(0, host, resolve)) - const { port } = server.address() - const gyp = { - opts: {}, - version: '42' - } - const url = `http://${host}:${port}` - const res = await install.test.download(gyp, url) - t.strictEqual(await res.text(), 'ok') -}) - -test('download over https with custom ca', async (t) => { - t.plan(3) - - const cafile = path.join(__dirname, 'fixtures/ca.crt') - const cacontents = certs['ca.crt'] - const cert = certs['server.crt'] - const key = certs['server.key'] - await fs.promises.writeFile(cafile, cacontents, 'utf8') - const ca = await install.test.readCAFile(cafile) - - t.strictEqual(ca.length, 1) - - const options = { ca: ca, cert: cert, key: key } - const server = https.createServer(options, (req, res) => { - t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) - res.end('ok') +describe('download', function () { + it('download over http', async function () { + const server = http.createServer((req, res) => { + assert.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) + res.end('ok') + }) + + after(() => new Promise((resolve) => server.close(resolve))) + + const host = 'localhost' + await new Promise((resolve) => server.listen(0, host, resolve)) + const { port } = server.address() + const gyp = { + opts: {}, + version: '42' + } + const url = `http://${host}:${port}` + const res = await install.test.download(gyp, url) + assert.strictEqual(await res.text(), 'ok') }) - t.tearDown(async () => { - await new Promise((resolve) => server.close(resolve)) - await fs.promises.unlink(cafile) + it('download over https with custom ca', async function () { + const cafile = path.join(__dirname, 'fixtures/ca.crt') + const cacontents = certs['ca.crt'] + const cert = certs['server.crt'] + const key = certs['server.key'] + await fs.promises.writeFile(cafile, cacontents, 'utf8') + const ca = await install.test.readCAFile(cafile) + + assert.strictEqual(ca.length, 1) + + const options = { ca: ca, cert: cert, key: key } + const server = https.createServer(options, (req, res) => { + assert.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) + 
res.end('ok') + }) + + after(async () => { + await new Promise((resolve) => server.close(resolve)) + await fs.promises.unlink(cafile) + }) + + server.on('clientError', (err) => { throw err }) + + const host = 'localhost' + await new Promise((resolve) => server.listen(0, host, resolve)) + const { port } = server.address() + const gyp = { + opts: { cafile }, + version: '42' + } + const url = `https://${host}:${port}` + const res = await install.test.download(gyp, url) + assert.strictEqual(await res.text(), 'ok') }) - server.on('clientError', (err) => { throw err }) - - const host = 'localhost' - await new Promise((resolve) => server.listen(0, host, resolve)) - const { port } = server.address() - const gyp = { - opts: { cafile }, - version: '42' - } - const url = `https://${host}:${port}` - const res = await install.test.download(gyp, url) - t.strictEqual(await res.text(), 'ok') -}) - -test('download over http with proxy', async (t) => { - t.plan(2) - - const server = http.createServer((_, res) => { - res.end('ok') + it('download over http with proxy', async function () { + const server = http.createServer((_, res) => { + res.end('ok') + }) + + const pserver = http.createServer((req, res) => { + assert.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) + res.end('proxy ok') + }) + + after(() => Promise.all([ + new Promise((resolve) => server.close(resolve)), + new Promise((resolve) => pserver.close(resolve)) + ])) + + const host = 'localhost' + await new Promise((resolve) => server.listen(0, host, resolve)) + const { port } = server.address() + await new Promise((resolve) => pserver.listen(port + 1, host, resolve)) + const gyp = { + opts: { + proxy: `http://${host}:${port + 1}`, + noproxy: 'bad' + }, + version: '42' + } + const url = `http://${host}:${port}` + const res = await install.test.download(gyp, url) + assert.strictEqual(await res.text(), 'proxy ok') }) - const pserver = http.createServer((req, res) => { - t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) - res.end('proxy ok') + it('download over http with noproxy', async function () { + const server = http.createServer((req, res) => { + assert.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) + res.end('ok') + }) + + const pserver = http.createServer((_, res) => { + res.end('proxy ok') + }) + + after(() => Promise.all([ + new Promise((resolve) => server.close(resolve)), + new Promise((resolve) => pserver.close(resolve)) + ])) + + const host = 'localhost' + await new Promise((resolve) => server.listen(0, host, resolve)) + const { port } = server.address() + await new Promise((resolve) => pserver.listen(port + 1, host, resolve)) + const gyp = { + opts: { + proxy: `http://${host}:${port + 1}`, + noproxy: host + }, + version: '42' + } + const url = `http://${host}:${port}` + const res = await install.test.download(gyp, url) + assert.strictEqual(await res.text(), 'ok') }) - t.tearDown(() => Promise.all([ - new Promise((resolve) => server.close(resolve)), - new Promise((resolve) => pserver.close(resolve)) - ])) - - const host = 'localhost' - await new Promise((resolve) => server.listen(0, host, resolve)) - const { port } = server.address() - await new Promise((resolve) => pserver.listen(port + 1, host, resolve)) - const gyp = { - opts: { - proxy: `http://${host}:${port + 1}`, - noproxy: 'bad' - }, - version: '42' - } - const url = `http://${host}:${port}` - const res = await install.test.download(gyp, url) - t.strictEqual(await res.text(), 'proxy 
ok') -}) - -test('download over http with noproxy', async (t) => { - t.plan(2) - - const server = http.createServer((req, res) => { - t.strictEqual(req.headers['user-agent'], `node-gyp v42 (node ${process.version})`) - res.end('ok') + it('download with missing cafile', async function () { + const gyp = { + opts: { cafile: 'no.such.file' } + } + try { + await install.test.download(gyp, {}, 'http://bad/') + } catch (e) { + assert.ok(/no.such.file/.test(e.message)) + } }) - const pserver = http.createServer((_, res) => { - res.end('proxy ok') + it('check certificate splitting', async function () { + const cafile = path.join(__dirname, 'fixtures/ca-bundle.crt') + const cacontents = certs['ca-bundle.crt'] + await fs.promises.writeFile(cafile, cacontents, 'utf8') + after(async () => { + await fs.promises.unlink(cafile) + }) + const cas = await install.test.readCAFile(path.join(__dirname, 'fixtures/ca-bundle.crt')) + assert.strictEqual(cas.length, 2) + assert.notStrictEqual(cas[0], cas[1]) }) - t.tearDown(() => Promise.all([ - new Promise((resolve) => server.close(resolve)), - new Promise((resolve) => pserver.close(resolve)) - ])) - - const host = 'localhost' - await new Promise((resolve) => server.listen(0, host, resolve)) - const { port } = server.address() - await new Promise((resolve) => pserver.listen(port + 1, host, resolve)) - const gyp = { - opts: { - proxy: `http://${host}:${port + 1}`, - noproxy: host - }, - version: '42' - } - const url = `http://${host}:${port}` - const res = await install.test.download(gyp, url) - t.strictEqual(await res.text(), 'ok') -}) - -test('download with missing cafile', async (t) => { - t.plan(1) - const gyp = { - opts: { cafile: 'no.such.file' } - } - try { - await install.test.download(gyp, {}, 'http://bad/') - } catch (e) { - t.ok(/no.such.file/.test(e.message)) - } -}) - -test('check certificate splitting', async (t) => { - const cafile = path.join(__dirname, 'fixtures/ca-bundle.crt') - const cacontents = certs['ca-bundle.crt'] - await fs.promises.writeFile(cafile, cacontents, 'utf8') - t.tearDown(async () => { - await fs.promises.unlink(cafile) + // only run this test if we are running a version of Node with predictable version path behavior + + it('download headers (actual)', async function () { + if (process.env.FAST_TEST || + process.release.name !== 'node' || + semver.prerelease(process.version) !== null || + semver.satisfies(process.version, '<10')) { + return this.skip('Skipping actual download of headers due to test environment configuration') + } + + this.timeout(300000) + + const expectedDir = path.join(devDir, process.version.replace(/^v/, '')) + await util.promisify(rimraf)(expectedDir) + + const prog = gyp() + prog.parseArgv([]) + prog.devDir = devDir + log.level = 'warn' + await util.promisify(install)(prog, []) + + const data = await fs.promises.readFile(path.join(expectedDir, 'installVersion'), 'utf8') + assert.strictEqual(data, '11\n', 'correct installVersion') + + const list = await fs.promises.readdir(path.join(expectedDir, 'include/node')) + assert.ok(list.includes('common.gypi')) + assert.ok(list.includes('config.gypi')) + assert.ok(list.includes('node.h')) + assert.ok(list.includes('node_version.h')) + assert.ok(list.includes('openssl')) + assert.ok(list.includes('uv')) + assert.ok(list.includes('uv.h')) + assert.ok(list.includes('v8-platform.h')) + assert.ok(list.includes('v8.h')) + assert.ok(list.includes('zlib.h')) + + const lines = (await fs.promises.readFile(path.join(expectedDir, 'include/node/node_version.h'), 
'utf8')).split('\n') + + // extract the 3 version parts from the defines to build a valid version string and + // and check them against our current env version + const version = ['major', 'minor', 'patch'].reduce((version, type) => { + const re = new RegExp(`^#define\\sNODE_${type.toUpperCase()}_VERSION`) + const line = lines.find((l) => re.test(l)) + const i = line ? parseInt(line.replace(/^[^0-9]+([0-9]+).*$/, '$1'), 10) : 'ERROR' + return `${version}${type !== 'major' ? '.' : 'v'}${i}` + }, '') + + assert.strictEqual(version, process.version) }) - const cas = await install.test.readCAFile(path.join(__dirname, 'fixtures/ca-bundle.crt')) - t.plan(2) - t.strictEqual(cas.length, 2) - t.notStrictEqual(cas[0], cas[1]) -}) - -// only run this test if we are running a version of Node with predictable version path behavior - -test('download headers (actual)', async (t) => { - if (process.env.FAST_TEST || - process.release.name !== 'node' || - semver.prerelease(process.version) !== null || - semver.satisfies(process.version, '<10')) { - return t.skip('Skipping actual download of headers due to test environment configuration') - } - - t.plan(12) - - const expectedDir = path.join(devDir, process.version.replace(/^v/, '')) - await util.promisify(rimraf)(expectedDir) - - const prog = gyp() - prog.parseArgv([]) - prog.devDir = devDir - log.level = 'warn' - await util.promisify(install)(prog, []) - - const data = await fs.promises.readFile(path.join(expectedDir, 'installVersion'), 'utf8') - t.strictEqual(data, '9\n', 'correct installVersion') - - const list = await fs.promises.readdir(path.join(expectedDir, 'include/node')) - t.ok(list.includes('common.gypi')) - t.ok(list.includes('config.gypi')) - t.ok(list.includes('node.h')) - t.ok(list.includes('node_version.h')) - t.ok(list.includes('openssl')) - t.ok(list.includes('uv')) - t.ok(list.includes('uv.h')) - t.ok(list.includes('v8-platform.h')) - t.ok(list.includes('v8.h')) - t.ok(list.includes('zlib.h')) - - const lines = (await fs.promises.readFile(path.join(expectedDir, 'include/node/node_version.h'), 'utf8')).split('\n') - - // extract the 3 version parts from the defines to build a valid version string and - // and check them against our current env version - const version = ['major', 'minor', 'patch'].reduce((version, type) => { - const re = new RegExp(`^#define\\sNODE_${type.toUpperCase()}_VERSION`) - const line = lines.find((l) => re.test(l)) - const i = line ? parseInt(line.replace(/^[^0-9]+([0-9]+).*$/, '$1'), 10) : 'ERROR' - return `${version}${type !== 'major' ? '.' 
: 'v'}${i}` - }, '') - - t.strictEqual(version, process.version) }) diff --git a/node_modules/npm/node_modules/node-gyp/test/test-find-accessible-sync.js b/node_modules/npm/node_modules/node-gyp/test/test-find-accessible-sync.js index 0a2e584..7edbc0c 100644 --- a/node_modules/npm/node_modules/node-gyp/test/test-find-accessible-sync.js +++ b/node_modules/npm/node_modules/node-gyp/test/test-find-accessible-sync.js @@ -1,6 +1,7 @@ 'use strict' -const test = require('tap').test +const { describe, it } = require('mocha') +const assert = require('assert') const path = require('path') const requireInject = require('require-inject') const configure = requireInject('../lib/configure', { @@ -27,58 +28,46 @@ const readableFiles = [ path.resolve(dir, readableFileInDir) ] -test('find accessible - empty array', function (t) { - t.plan(1) - - var candidates = [] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, undefined) -}) - -test('find accessible - single item array, readable', function (t) { - t.plan(1) - - var candidates = [readableFile] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, path.resolve(dir, readableFile)) -}) - -test('find accessible - single item array, readable in subdir', function (t) { - t.plan(1) - - var candidates = [readableFileInDir] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, path.resolve(dir, readableFileInDir)) -}) - -test('find accessible - single item array, unreadable', function (t) { - t.plan(1) - - var candidates = ['unreadable_file'] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, undefined) -}) - -test('find accessible - multi item array, no matches', function (t) { - t.plan(1) - - var candidates = ['non_existent_file', 'unreadable_file'] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, undefined) -}) - -test('find accessible - multi item array, single match', function (t) { - t.plan(1) - - var candidates = ['non_existent_file', readableFile] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, path.resolve(dir, readableFile)) -}) - -test('find accessible - multi item array, return first match', function (t) { - t.plan(1) - - var candidates = ['non_existent_file', anotherReadableFile, readableFile] - var found = configure.test.findAccessibleSync('test', dir, candidates) - t.strictEqual(found, path.resolve(dir, anotherReadableFile)) +describe('find-accessible-sync', function () { + it('find accessible - empty array', function () { + var candidates = [] + var found = configure.test.findAccessibleSync('test', dir, candidates) + assert.strictEqual(found, undefined) + }) + + it('find accessible - single item array, readable', function () { + var candidates = [readableFile] + var found = configure.test.findAccessibleSync('test', dir, candidates) + assert.strictEqual(found, path.resolve(dir, readableFile)) + }) + + it('find accessible - single item array, readable in subdir', function () { + var candidates = [readableFileInDir] + var found = configure.test.findAccessibleSync('test', dir, candidates) + assert.strictEqual(found, path.resolve(dir, readableFileInDir)) + }) + + it('find accessible - single item array, unreadable', function () { + var candidates = ['unreadable_file'] + var found = configure.test.findAccessibleSync('test', dir, candidates) + assert.strictEqual(found, undefined) + }) + + 
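// [editor's note] The assertions in this suite pin down findAccessibleSync's
// contract without showing its body: candidates are resolved against `dir`,
// the first readable one is returned, and undefined means none matched. A
// plausible minimal implementation consistent with these tests -- an
// assumption for illustration, not the actual lib/configure.js source:
const fsSketch = require('fs')
const pathSketch = require('path')

function findAccessibleSyncSketch (logprefix, dir, candidates) {
  for (const candidate of candidates) {
    const resolved = pathSketch.resolve(dir, candidate)
    try {
      // throws if the file is missing or unreadable
      fsSketch.accessSync(resolved, fsSketch.constants.R_OK)
      return resolved // first readable candidate wins, as the last test checks
    } catch (err) {
      // fall through to the next candidate
    }
  }
  return undefined // covers the 'empty array' and 'no matches' cases
}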
it('find accessible - multi item array, no matches', function () { + var candidates = ['non_existent_file', 'unreadable_file'] + var found = configure.test.findAccessibleSync('test', dir, candidates) + assert.strictEqual(found, undefined) + }) + + it('find accessible - multi item array, single match', function () { + var candidates = ['non_existent_file', readableFile] + var found = configure.test.findAccessibleSync('test', dir, candidates) + assert.strictEqual(found, path.resolve(dir, readableFile)) + }) + + it('find accessible - multi item array, return first match', function () { + var candidates = ['non_existent_file', anotherReadableFile, readableFile] + var found = configure.test.findAccessibleSync('test', dir, candidates) + assert.strictEqual(found, path.resolve(dir, anotherReadableFile)) + }) }) diff --git a/node_modules/npm/node_modules/node-gyp/test/test-find-node-directory.js b/node_modules/npm/node_modules/node-gyp/test/test-find-node-directory.js index fa6223c..ca299f6 100644 --- a/node_modules/npm/node_modules/node-gyp/test/test-find-node-directory.js +++ b/node_modules/npm/node_modules/node-gyp/test/test-find-node-directory.js @@ -1,119 +1,115 @@ 'use strict' -const test = require('tap').test +const { describe, it } = require('mocha') +const assert = require('assert') const path = require('path') const findNodeDirectory = require('../lib/find-node-directory') const platforms = ['darwin', 'freebsd', 'linux', 'sunos', 'win32', 'aix', 'os400'] -// we should find the directory based on the directory -// the script is running in and it should match the layout -// in a build tree where npm is installed in -// .... /deps/npm -test('test find-node-directory - node install', function (t) { - t.plan(platforms.length) - for (var next = 0; next < platforms.length; next++) { - var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } - t.equal( - findNodeDirectory('/x/deps/npm/node_modules/node-gyp/lib', processObj), - path.join('/x')) - } -}) +describe('find-node-directory', function () { + // we should find the directory based on the directory + // the script is running in and it should match the layout + // in a build tree where npm is installed in + // .... /deps/npm + it('test find-node-directory - node install', function () { + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } + assert.strictEqual( + findNodeDirectory('/x/deps/npm/node_modules/node-gyp/lib', processObj), + path.join('/x')) + } + }) -// we should find the directory based on the directory -// the script is running in and it should match the layout -// in an installed tree where npm is installed in -// .... /lib/node_modules/npm or .../node_modules/npm -// depending on the patform -test('test find-node-directory - node build', function (t) { - t.plan(platforms.length) - for (var next = 0; next < platforms.length; next++) { - var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } - if (platforms[next] === 'win32') { - t.equal( - findNodeDirectory('/y/node_modules/npm/node_modules/node-gyp/lib', - processObj), path.join('/y')) - } else { - t.equal( - findNodeDirectory('/y/lib/node_modules/npm/node_modules/node-gyp/lib', - processObj), path.join('/y')) + // we should find the directory based on the directory + // the script is running in and it should match the layout + // in an installed tree where npm is installed in + // .... 
/lib/node_modules/npm or .../node_modules/npm + // depending on the patform + it('test find-node-directory - node build', function () { + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } + if (platforms[next] === 'win32') { + assert.strictEqual( + findNodeDirectory('/y/node_modules/npm/node_modules/node-gyp/lib', + processObj), path.join('/y')) + } else { + assert.strictEqual( + findNodeDirectory('/y/lib/node_modules/npm/node_modules/node-gyp/lib', + processObj), path.join('/y')) + } } - } -}) + }) -// we should find the directory based on the execPath -// for node and match because it was in the bin directory -test('test find-node-directory - node in bin directory', function (t) { - t.plan(platforms.length) - for (var next = 0; next < platforms.length; next++) { - var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } - t.equal( - findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), - path.join('/x/y')) - } -}) + // we should find the directory based on the execPath + // for node and match because it was in the bin directory + it('test find-node-directory - node in bin directory', function () { + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } + assert.strictEqual( + findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), + path.join('/x/y')) + } + }) -// we should find the directory based on the execPath -// for node and match because it was in the Release directory -test('test find-node-directory - node in build release dir', function (t) { - t.plan(platforms.length) - for (var next = 0; next < platforms.length; next++) { - var processObj - if (platforms[next] === 'win32') { - processObj = { execPath: '/x/y/Release/node', platform: platforms[next] } - } else { - processObj = { - execPath: '/x/y/out/Release/node', - platform: platforms[next] + // we should find the directory based on the execPath + // for node and match because it was in the Release directory + it('test find-node-directory - node in build release dir', function () { + for (var next = 0; next < platforms.length; next++) { + var processObj + if (platforms[next] === 'win32') { + processObj = { execPath: '/x/y/Release/node', platform: platforms[next] } + } else { + processObj = { + execPath: '/x/y/out/Release/node', + platform: platforms[next] + } } + + assert.strictEqual( + findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), + path.join('/x/y')) } + }) - t.equal( - findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), - path.join('/x/y')) - } -}) + // we should find the directory based on the execPath + // for node and match because it was in the Debug directory + it('test find-node-directory - node in Debug release dir', function () { + for (var next = 0; next < platforms.length; next++) { + var processObj + if (platforms[next] === 'win32') { + processObj = { execPath: '/a/b/Debug/node', platform: platforms[next] } + } else { + processObj = { execPath: '/a/b/out/Debug/node', platform: platforms[next] } + } -// we should find the directory based on the execPath -// for node and match because it was in the Debug directory -test('test find-node-directory - node in Debug release dir', function (t) { - t.plan(platforms.length) - for (var next = 0; next < platforms.length; next++) { - var processObj - if (platforms[next] === 'win32') { - processObj = { execPath: 
'/a/b/Debug/node', platform: platforms[next] } - } else { - processObj = { execPath: '/a/b/out/Debug/node', platform: platforms[next] } + assert.strictEqual( + findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), + path.join('/a/b')) } + }) - t.equal( - findNodeDirectory('/nothere/npm/node_modules/node-gyp/lib', processObj), - path.join('/a/b')) - } -}) - -// we should not find it as it will not match based on the execPath nor -// the directory from which the script is running -test('test find-node-directory - not found', function (t) { - t.plan(platforms.length) - for (var next = 0; next < platforms.length; next++) { - var processObj = { execPath: '/x/y/z/y', platform: next } - t.equal(findNodeDirectory('/a/b/c/d', processObj), '') - } -}) + // we should not find it as it will not match based on the execPath nor + // the directory from which the script is running + it('test find-node-directory - not found', function () { + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/z/y', platform: next } + assert.strictEqual(findNodeDirectory('/a/b/c/d', processObj), '') + } + }) -// we should find the directory based on the directory -// the script is running in and it should match the layout -// in a build tree where npm is installed in -// .... /deps/npm -// same test as above but make sure additional directory entries -// don't cause an issue -test('test find-node-directory - node install', function (t) { - t.plan(platforms.length) - for (var next = 0; next < platforms.length; next++) { - var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } - t.equal( - findNodeDirectory('/x/y/z/a/b/c/deps/npm/node_modules/node-gyp/lib', - processObj), path.join('/x/y/z/a/b/c')) - } + // we should find the directory based on the directory + // the script is running in and it should match the layout + // in a build tree where npm is installed in + // .... 
/deps/npm + // same test as above but make sure additional directory entries + // don't cause an issue + it('test find-node-directory - node install', function () { + for (var next = 0; next < platforms.length; next++) { + var processObj = { execPath: '/x/y/bin/node', platform: platforms[next] } + assert.strictEqual( + findNodeDirectory('/x/y/z/a/b/c/deps/npm/node_modules/node-gyp/lib', + processObj), path.join('/x/y/z/a/b/c')) + } + }) }) diff --git a/node_modules/npm/node_modules/node-gyp/test/test-find-python.js b/node_modules/npm/node_modules/node-gyp/test/test-find-python.js index 67d0b26..592c480 100644 --- a/node_modules/npm/node_modules/node-gyp/test/test-find-python.js +++ b/node_modules/npm/node_modules/node-gyp/test/test-find-python.js @@ -2,225 +2,212 @@ delete process.env.PYTHON -const test = require('tap').test +const { describe, it } = require('mocha') +const assert = require('assert') const findPython = require('../lib/find-python') const execFile = require('child_process').execFile const PythonFinder = findPython.test.PythonFinder require('npmlog').level = 'warn' -test('find python', function (t) { - t.plan(4) - - findPython.test.findPython(null, function (err, found) { - t.strictEqual(err, null) - var proc = execFile(found, ['-V'], function (err, stdout, stderr) { - t.strictEqual(err, null) - t.ok(/Python 3/.test(stdout)) - t.strictEqual(stderr, '') +describe('find-python', function () { + it('find python', function () { + findPython.test.findPython(null, function (err, found) { + assert.strictEqual(err, null) + var proc = execFile(found, ['-V'], function (err, stdout, stderr) { + assert.strictEqual(err, null) + assert.ok(/Python 3/.test(stdout)) + assert.strictEqual(stderr, '') + }) + proc.stdout.setEncoding('utf-8') + proc.stderr.setEncoding('utf-8') }) - proc.stdout.setEncoding('utf-8') - proc.stderr.setEncoding('utf-8') }) -}) -function poison (object, property) { - function fail () { - console.error(Error(`Property ${property} should not have been accessed.`)) - process.abort() - } - var descriptor = { - configurable: false, - enumerable: false, - get: fail, - set: fail - } - Object.defineProperty(object, property, descriptor) -} - -function TestPythonFinder () { - PythonFinder.apply(this, arguments) -} -TestPythonFinder.prototype = Object.create(PythonFinder.prototype) -// Silence npmlog - remove for debugging -TestPythonFinder.prototype.log = { - silly: () => {}, - verbose: () => {}, - info: () => {}, - warn: () => {}, - error: () => {} -} -delete TestPythonFinder.prototype.env.NODE_GYP_FORCE_PYTHON - -test('find python - python', function (t) { - t.plan(6) - - var f = new TestPythonFinder('python', done) - f.execFile = function (program, args, opts, cb) { - f.execFile = function (program, args, opts, cb) { - poison(f, 'execFile') - t.strictEqual(program, '/path/python') - t.ok(/sys\.version_info/.test(args[1])) - cb(null, '3.9.1') - } - t.strictEqual(program, - process.platform === 'win32' ? 
'"python"' : 'python') - t.ok(/sys\.executable/.test(args[1])) - cb(null, '/path/python') + function poison (object, property) { + function fail () { + console.error(Error(`Property ${property} should not have been accessed.`)) + process.abort() + } + var descriptor = { + configurable: false, + enumerable: false, + get: fail, + set: fail + } + Object.defineProperty(object, property, descriptor) } - f.findPython() - function done (err, python) { - t.strictEqual(err, null) - t.strictEqual(python, '/path/python') + function TestPythonFinder () { + PythonFinder.apply(this, arguments) } -}) - -test('find python - python too old', function (t) { - t.plan(2) + TestPythonFinder.prototype = Object.create(PythonFinder.prototype) + // Silence npmlog - remove for debugging + TestPythonFinder.prototype.log = { + silly: () => {}, + verbose: () => {}, + info: () => {}, + warn: () => {}, + error: () => {} + } + delete TestPythonFinder.prototype.env.NODE_GYP_FORCE_PYTHON - var f = new TestPythonFinder(null, done) - f.execFile = function (program, args, opts, cb) { - if (/sys\.executable/.test(args[args.length - 1])) { + it('find python - python', function () { + var f = new TestPythonFinder('python', done) + f.execFile = function (program, args, opts, cb) { + f.execFile = function (program, args, opts, cb) { + poison(f, 'execFile') + assert.strictEqual(program, '/path/python') + assert.ok(/sys\.version_info/.test(args[1])) + cb(null, '3.9.1') + } + assert.strictEqual(program, + process.platform === 'win32' ? '"python"' : 'python') + assert.ok(/sys\.executable/.test(args[1])) cb(null, '/path/python') - } else if (/sys\.version_info/.test(args[args.length - 1])) { - cb(null, '2.3.4') - } else { - t.fail() } - } - f.findPython() - - function done (err) { - t.ok(/Could not find any Python/.test(err)) - t.ok(/not supported/i.test(f.errorLog)) - } -}) - -test('find python - no python', function (t) { - t.plan(2) + f.findPython() - var f = new TestPythonFinder(null, done) - f.execFile = function (program, args, opts, cb) { - if (/sys\.executable/.test(args[args.length - 1])) { - cb(new Error('not found')) - } else if (/sys\.version_info/.test(args[args.length - 1])) { - cb(new Error('not a Python executable')) - } else { - t.fail() + function done (err, python) { + assert.strictEqual(err, null) + assert.strictEqual(python, '/path/python') } - } - f.findPython() + }) - function done (err) { - t.ok(/Could not find any Python/.test(err)) - t.ok(/not in PATH/.test(f.errorLog)) - } -}) + it('find python - python too old', function () { + var f = new TestPythonFinder(null, done) + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(null, '/path/python') + } else if (/sys\.version_info/.test(args[args.length - 1])) { + cb(null, '2.3.4') + } else { + assert.fail() + } + } + f.findPython() -test('find python - no python2, no python, unix', function (t) { - t.plan(2) + function done (err) { + assert.ok(/Could not find any Python/.test(err)) + assert.ok(/not supported/i.test(f.errorLog)) + } + }) - var f = new TestPythonFinder(null, done) - f.checkPyLauncher = t.fail - f.win = false + it('find python - no python', function () { + var f = new TestPythonFinder(null, done) + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else if (/sys\.version_info/.test(args[args.length - 1])) { + cb(new Error('not a Python executable')) + } else { + assert.fail() + } + } + f.findPython() - 
f.execFile = function (program, args, opts, cb) { - if (/sys\.executable/.test(args[args.length - 1])) { - cb(new Error('not found')) - } else { - t.fail() + function done (err) { + assert.ok(/Could not find any Python/.test(err)) + assert.ok(/not in PATH/.test(f.errorLog)) } - } - f.findPython() + }) - function done (err) { - t.ok(/Could not find any Python/.test(err)) - t.ok(/not in PATH/.test(f.errorLog)) - } -}) + it('find python - no python2, no python, unix', function () { + var f = new TestPythonFinder(null, done) + f.checkPyLauncher = assert.fail + f.win = false -test('find python - no python, use python launcher', function (t) { - t.plan(4) - - var f = new TestPythonFinder(null, done) - f.win = true - - f.execFile = function (program, args, opts, cb) { - if (program === 'py.exe') { - t.notEqual(args.indexOf('-3'), -1) - t.notEqual(args.indexOf('-c'), -1) - return cb(null, 'Z:\\snake.exe') - } - if (/sys\.executable/.test(args[args.length - 1])) { - cb(new Error('not found')) - } else if (f.winDefaultLocations.includes(program)) { - cb(new Error('not found')) - } else if (/sys\.version_info/.test(args[args.length - 1])) { - if (program === 'Z:\\snake.exe') { - cb(null, '3.9.0') + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) } else { - t.fail() + assert.fail() } - } else { - t.fail() } - } - f.findPython() + f.findPython() - function done (err, python) { - t.strictEqual(err, null) - t.strictEqual(python, 'Z:\\snake.exe') - } -}) - -test('find python - no python, no python launcher, good guess', function (t) { - t.plan(2) + function done (err) { + assert.ok(/Could not find any Python/.test(err)) + assert.ok(/not in PATH/.test(f.errorLog)) + } + }) - var f = new TestPythonFinder(null, done) - f.win = true - const expectedProgram = f.winDefaultLocations[0] + it('find python - no python, use python launcher', function () { + var f = new TestPythonFinder(null, done) + f.win = true - f.execFile = function (program, args, opts, cb) { - if (program === 'py.exe') { - return cb(new Error('not found')) + f.execFile = function (program, args, opts, cb) { + if (program === 'py.exe') { + assert.notStrictEqual(args.indexOf('-3'), -1) + assert.notStrictEqual(args.indexOf('-c'), -1) + return cb(null, 'Z:\\snake.exe') + } + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else if (f.winDefaultLocations.includes(program)) { + cb(new Error('not found')) + } else if (/sys\.version_info/.test(args[args.length - 1])) { + if (program === 'Z:\\snake.exe') { + cb(null, '3.9.0') + } else { + assert.fail() + } + } else { + assert.fail() + } } - if (/sys\.executable/.test(args[args.length - 1])) { - cb(new Error('not found')) - } else if (program === expectedProgram && - /sys\.version_info/.test(args[args.length - 1])) { - cb(null, '3.7.3') - } else { - t.fail() + f.findPython() + + function done (err, python) { + assert.strictEqual(err, null) + assert.strictEqual(python, 'Z:\\snake.exe') } - } - f.findPython() + }) - function done (err, python) { - t.strictEqual(err, null) - t.ok(python === expectedProgram) - } -}) + it('find python - no python, no python launcher, good guess', function () { + var f = new TestPythonFinder(null, done) + f.win = true + const expectedProgram = f.winDefaultLocations[0] + + f.execFile = function (program, args, opts, cb) { + if (program === 'py.exe') { + return cb(new Error('not found')) + } + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new 
Error('not found')) + } else if (program === expectedProgram && + /sys\.version_info/.test(args[args.length - 1])) { + cb(null, '3.7.3') + } else { + assert.fail() + } + } + f.findPython() -test('find python - no python, no python launcher, bad guess', function (t) { - t.plan(2) + function done (err, python) { + assert.strictEqual(err, null) + assert.ok(python === expectedProgram) + } + }) - var f = new TestPythonFinder(null, done) - f.win = true + it('find python - no python, no python launcher, bad guess', function () { + var f = new TestPythonFinder(null, done) + f.win = true - f.execFile = function (program, args, opts, cb) { - if (/sys\.executable/.test(args[args.length - 1])) { - cb(new Error('not found')) - } else if (/sys\.version_info/.test(args[args.length - 1])) { - cb(new Error('not a Python executable')) - } else { - t.fail() + f.execFile = function (program, args, opts, cb) { + if (/sys\.executable/.test(args[args.length - 1])) { + cb(new Error('not found')) + } else if (/sys\.version_info/.test(args[args.length - 1])) { + cb(new Error('not a Python executable')) + } else { + assert.fail() + } } - } - f.findPython() + f.findPython() - function done (err) { - t.ok(/Could not find any Python/.test(err)) - t.ok(/not in PATH/.test(f.errorLog)) - } + function done (err) { + assert.ok(/Could not find any Python/.test(err)) + assert.ok(/not in PATH/.test(f.errorLog)) + } + }) }) diff --git a/node_modules/npm/node_modules/node-gyp/test/test-find-visualstudio.js b/node_modules/npm/node_modules/node-gyp/test/test-find-visualstudio.js index 1327cf8..29d9a7d 100644 --- a/node_modules/npm/node_modules/node-gyp/test/test-find-visualstudio.js +++ b/node_modules/npm/node_modules/node-gyp/test/test-find-visualstudio.js @@ -1,6 +1,7 @@ 'use strict' -const test = require('tap').test +const { describe, it } = require('mocha') +const assert = require('assert') const fs = require('fs') const path = require('path') const findVisualStudio = require('../lib/find-visualstudio') @@ -35,642 +36,635 @@ TestVisualStudioFinder.prototype.log = { error: () => {} } -test('VS2013', function (t) { - t.plan(4) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\MSBuild12\\MSBuild.exe', - path: 'C:\\VS2013', - sdk: null, - toolset: 'v120', - version: '12.0', - versionMajor: 12, - versionMinor: 0, - versionYear: 2013 +describe('find-visualstudio', function () { + it('VS2013', function () { + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info, { + msBuild: 'C:\\MSBuild12\\MSBuild.exe', + path: 'C:\\VS2013', + sdk: null, + toolset: 'v120', + version: '12.0', + versionMajor: 12, + versionMinor: 0, + versionYear: 2013 + }) }) - }) - finder.findVisualStudio2017OrNewer = (cb) => { - finder.parseData(new Error(), '', '', cb) - } - finder.regSearchKeys = (keys, value, addOpts, cb) => { - for (var i = 0; i < keys.length; ++i) { - const fullName = `${keys[i]}\\${value}` - switch (fullName) { - case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - continue - case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': - t.pass(`expected search for registry value ${fullName}`) - return cb(null, 'C:\\VS2013\\VC\\') - case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\12.0\\MSBuildToolsPath': - t.pass(`expected search for registry value ${fullName}`) - 
return cb(null, 'C:\\MSBuild12\\') - default: - t.fail(`unexpected search for registry value ${fullName}`) - } + finder.findVisualStudio2017OrNewer = (cb) => { + finder.parseData(new Error(), '', '', cb) } - return cb(new Error()) - } - finder.findVisualStudio() -}) - -test('VS2013 should not be found on new node versions', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder({ - major: 10, - minor: 0, - patch: 0 - }, null, (err, info) => { - t.ok(/find .* Visual Studio/i.test(err), 'expect error') - t.false(info, 'no data') - }) - - finder.findVisualStudio2017OrNewer = (cb) => { - const file = path.join(__dirname, 'fixtures', 'VS_2017_Unusable.txt') - const data = fs.readFileSync(file) - finder.parseData(null, data, '', cb) - } - finder.regSearchKeys = (keys, value, addOpts, cb) => { - for (var i = 0; i < keys.length; ++i) { - const fullName = `${keys[i]}\\${value}` - switch (fullName) { - case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - continue - default: - t.fail(`unexpected search for registry value ${fullName}`) + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + continue + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': + assert.ok(true, `expected search for registry value ${fullName}`) + return cb(null, 'C:\\VS2013\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\12.0\\MSBuildToolsPath': + assert.ok(true, `expected search for registry value ${fullName}`) + return cb(null, 'C:\\MSBuild12\\') + default: + assert.fail(`unexpected search for registry value ${fullName}`) + } } + return cb(new Error()) } - return cb(new Error()) - } - finder.findVisualStudio() -}) + finder.findVisualStudio() + }) -test('VS2015', function (t) { - t.plan(4) - - const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { - t.strictEqual(err, null) - t.deepEqual(info, { - msBuild: 'C:\\MSBuild14\\MSBuild.exe', - path: 'C:\\VS2015', - sdk: null, - toolset: 'v140', - version: '14.0', - versionMajor: 14, - versionMinor: 0, - versionYear: 2015 + it('VS2013 should not be found on new node versions', function () { + const finder = new TestVisualStudioFinder({ + major: 10, + minor: 0, + patch: 0 + }, null, (err, info) => { + assert.ok(/find .* Visual Studio/i.test(err), 'expect error') + assert.ok(!info, 'no data') }) - }) - finder.findVisualStudio2017OrNewer = (cb) => { - finder.parseData(new Error(), '', '', cb) - } - finder.regSearchKeys = (keys, value, addOpts, cb) => { - for (var i = 0; i < keys.length; ++i) { - const fullName = `${keys[i]}\\${value}` - switch (fullName) { - case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': - t.pass(`expected search for registry value ${fullName}`) - return cb(null, 'C:\\VS2015\\VC\\') - case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\14.0\\MSBuildToolsPath': - t.pass(`expected search for registry value ${fullName}`) - return cb(null, 'C:\\MSBuild14\\') - default: - t.fail(`unexpected search for registry value ${fullName}`) + finder.findVisualStudio2017OrNewer = (cb) => { + const file = path.join(__dirname, 'fixtures', 'VS_2017_Unusable.txt') + const data = fs.readFileSync(file) + finder.parseData(null, data, '', cb) + } + 
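// [editor's note] The lookup order these stubs encode: findVisualStudio()
// first tries findVisualStudio2017OrNewer (PowerShell output fed through
// parseData) and only on failure walks the registry via regSearchKeys --
// VC7\14.0 for VS2015, VC7\12.0 for VS2013. Each test therefore whitelists
// exactly the registry values its scenario may touch and fails on anything
// else. That idiom condensed into a reusable helper (an illustration, not
// code from this file; `assert` is the module imported at its top):
function makeRegSearchStub (expected, ignored = []) {
  return (keys, value, addOpts, cb) => {
    for (const key of keys) {
      const fullName = `${key}\\${value}`
      if (fullName in expected) return cb(null, expected[fullName])
      if (!ignored.includes(fullName)) {
        assert.fail(`unexpected search for registry value ${fullName}`)
      }
    }
    return cb(new Error()) // nothing usable found under any key
  }
}

// usage, mirroring the VS2013 case at the top of this suite:
//   finder.regSearchKeys = makeRegSearchStub(
//     { 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0': 'C:\\VS2013\\VC\\' },
//     ['HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0']
//   )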
finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + continue + default: + assert.fail(`unexpected search for registry value ${fullName}`) + } } + return cb(new Error()) } - return cb(new Error()) - } - finder.findVisualStudio() -}) - -test('error from PowerShell', function (t) { - t.plan(2) - - const finder = new TestVisualStudioFinder(semverV1, null, null) - - finder.parseData(new Error(), '', '', (info) => { - t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') - t.false(info, 'no data') + finder.findVisualStudio() }) -}) - -test('empty output from PowerShell', function (t) { - t.plan(2) - const finder = new TestVisualStudioFinder(semverV1, null, null) + it('VS2015', function () { + const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => { + assert.strictEqual(err, null) + assert.deepStrictEqual(info, { + msBuild: 'C:\\MSBuild14\\MSBuild.exe', + path: 'C:\\VS2015', + sdk: null, + toolset: 'v140', + version: '14.0', + versionMajor: 14, + versionMinor: 0, + versionYear: 2015 + }) + }) - finder.parseData(null, '', '', (info) => { - t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') - t.false(info, 'no data') + finder.findVisualStudio2017OrNewer = (cb) => { + finder.parseData(new Error(), '', '', cb) + } + finder.regSearchKeys = (keys, value, addOpts, cb) => { + for (var i = 0; i < keys.length; ++i) { + const fullName = `${keys[i]}\\${value}` + switch (fullName) { + case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0': + assert.ok(true, `expected search for registry value ${fullName}`) + return cb(null, 'C:\\VS2015\\VC\\') + case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\14.0\\MSBuildToolsPath': + assert.ok(true, `expected search for registry value ${fullName}`) + return cb(null, 'C:\\MSBuild14\\') + default: + assert.fail(`unexpected search for registry value ${fullName}`) + } + } + return cb(new Error()) + } + finder.findVisualStudio() }) -}) - -test('output from PowerShell not JSON', function (t) { - t.plan(2) - const finder = new TestVisualStudioFinder(semverV1, null, null) + it('error from PowerShell', function () { + const finder = new TestVisualStudioFinder(semverV1, null, null) - finder.parseData(null, 'AAAABBBB', '', (info) => { - t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') - t.false(info, 'no data') + finder.parseData(new Error(), '', '', (info) => { + assert.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + assert.ok(!info, 'no data') + }) }) -}) -test('wrong JSON from PowerShell', function (t) { - t.plan(2) + it('empty output from PowerShell', function () { + const finder = new TestVisualStudioFinder(semverV1, null, null) - const finder = new TestVisualStudioFinder(semverV1, null, null) - - finder.parseData(null, '{}', '', (info) => { - t.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') - t.false(info, 'no data') + finder.parseData(null, '', '', (info) => { + assert.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error') + assert.ok(!info, 'no data') + }) }) -}) - -test('empty JSON from PowerShell', function (t) { - t.plan(2) - const finder = new TestVisualStudioFinder(semverV1, null, null) + it('output from PowerShell not JSON', function () { + const finder = new TestVisualStudioFinder(semverV1, null, null) - 
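// [editor's note] Taken together, the parseData cases in this suite pin down
// its error taxonomy: a spawn error, empty output, non-JSON ('AAAABBBB'), or
// a non-array ('{}') all log a "use PowerShell" failure, while a valid but
// empty array ('[]') means PowerShell ran fine and no usable Visual Studio
// was found. A compact restatement of that branching, inferred from the
// assertions rather than copied from lib/find-visualstudio.js:
function classifyPowerShellResult (err, stdout) {
  if (err) return 'could-not-use-powershell'
  let parsed
  try {
    parsed = JSON.parse(stdout) // JSON.parse('') also throws, covering empty output
  } catch (e) {
    return 'could-not-use-powershell'
  }
  if (!Array.isArray(parsed)) return 'could-not-use-powershell' // the '{}' case
  if (parsed.length === 0) return 'no-visual-studio-found' // the '[]' case
  return 'candidates' // entries go on to the per-version usability checks
}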
 finder.parseData(null, '[]', '', (info) => {
-    t.ok(/find .* Visual Studio/i.test(finder.errorLog[0]), 'expect error')
-    t.false(info, 'no data')
+    finder.parseData(null, 'AAAABBBB', '', (info) => {
+      assert.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error')
+      assert.ok(!info, 'no data')
+    })
   })
-})
-
-test('future version', function (t) {
-  t.plan(3)
-  const finder = new TestVisualStudioFinder(semverV1, null, null)
+  it('wrong JSON from PowerShell', function () {
+    const finder = new TestVisualStudioFinder(semverV1, null, null)
 
-  finder.parseData(null, JSON.stringify([{
-    packages: [
-      'Microsoft.VisualStudio.Component.VC.Tools.x86.x64',
-      'Microsoft.VisualStudio.Component.Windows10SDK.17763',
-      'Microsoft.VisualStudio.VC.MSBuild.Base'
-    ],
-    path: 'C:\\VS',
-    version: '9999.9999.9999.9999'
-  }]), '', (info) => {
-    t.ok(/unknown version/i.test(finder.errorLog[0]), 'expect error')
-    t.ok(/find .* Visual Studio/i.test(finder.errorLog[1]), 'expect error')
-    t.false(info, 'no data')
+    finder.parseData(null, '{}', '', (info) => {
+      assert.ok(/use PowerShell/i.test(finder.errorLog[0]), 'expect error')
+      assert.ok(!info, 'no data')
+    })
   })
-})
 
-test('single unusable VS2017', function (t) {
-  t.plan(3)
+  it('empty JSON from PowerShell', function () {
+    const finder = new TestVisualStudioFinder(semverV1, null, null)
 
-  const finder = new TestVisualStudioFinder(semverV1, null, null)
-
-  const file = path.join(__dirname, 'fixtures', 'VS_2017_Unusable.txt')
-  const data = fs.readFileSync(file)
-  finder.parseData(null, data, '', (info) => {
-    t.ok(/checking/i.test(finder.errorLog[0]), 'expect error')
-    t.ok(/find .* Visual Studio/i.test(finder.errorLog[2]), 'expect error')
-    t.false(info, 'no data')
+    finder.parseData(null, '[]', '', (info) => {
+      assert.ok(/find .* Visual Studio/i.test(finder.errorLog[0]), 'expect error')
+      assert.ok(!info, 'no data')
+    })
   })
-})
 
-test('minimal VS2017 Build Tools', function (t) {
-  t.plan(2)
-
-  const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
-    t.strictEqual(err, null)
-    t.deepEqual(info, {
-      msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' +
-        'BuildTools\\MSBuild\\15.0\\Bin\\MSBuild.exe',
-      path:
-        'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\BuildTools',
-      sdk: '10.0.17134.0',
-      toolset: 'v141',
-      version: '15.9.28307.665',
-      versionMajor: 15,
-      versionMinor: 9,
-      versionYear: 2017
+  it('future version', function () {
+    const finder = new TestVisualStudioFinder(semverV1, null, null)
+
+    finder.parseData(null, JSON.stringify([{
+      packages: [
+        'Microsoft.VisualStudio.Component.VC.Tools.x86.x64',
+        'Microsoft.VisualStudio.Component.Windows10SDK.17763',
+        'Microsoft.VisualStudio.VC.MSBuild.Base'
+      ],
+      path: 'C:\\VS',
+      version: '9999.9999.9999.9999'
+    }]), '', (info) => {
+      assert.ok(/unknown version/i.test(finder.errorLog[0]), 'expect error')
+      assert.ok(/find .* Visual Studio/i.test(finder.errorLog[1]), 'expect error')
+      assert.ok(!info, 'no data')
     })
   })
-  poison(finder, 'regSearchKeys')
-  finder.findVisualStudio2017OrNewer = (cb) => {
-    const file = path.join(__dirname, 'fixtures',
-      'VS_2017_BuildTools_minimal.txt')
-    const data = fs.readFileSync(file)
-    finder.parseData(null, data, '', cb)
-  }
-  finder.findVisualStudio()
-})
 
+  it('single unusable VS2017', function () {
+    const finder = new TestVisualStudioFinder(semverV1, null, null)
 
-test('VS2017 Community with C++ workload', function (t) {
-  t.plan(2)
-
-  const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
-    t.strictEqual(err, null)
-    t.deepEqual(info, {
-      msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' +
-        'Community\\MSBuild\\15.0\\Bin\\MSBuild.exe',
-      path:
-        'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community',
-      sdk: '10.0.17763.0',
-      toolset: 'v141',
-      version: '15.9.28307.665',
-      versionMajor: 15,
-      versionMinor: 9,
-      versionYear: 2017
+    const file = path.join(__dirname, 'fixtures', 'VS_2017_Unusable.txt')
+    const data = fs.readFileSync(file)
+    finder.parseData(null, data, '', (info) => {
+      assert.ok(/checking/i.test(finder.errorLog[0]), 'expect error')
+      assert.ok(/find .* Visual Studio/i.test(finder.errorLog[2]), 'expect error')
+      assert.ok(!info, 'no data')
     })
   })
-  poison(finder, 'regSearchKeys')
-  finder.findVisualStudio2017OrNewer = (cb) => {
-    const file = path.join(__dirname, 'fixtures',
-      'VS_2017_Community_workload.txt')
-    const data = fs.readFileSync(file)
-    finder.parseData(null, data, '', cb)
-  }
-  finder.findVisualStudio()
-})
-
-test('VS2017 Express', function (t) {
-  t.plan(2)
-
-  const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
-    t.strictEqual(err, null)
-    t.deepEqual(info, {
-      msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' +
-        'WDExpress\\MSBuild\\15.0\\Bin\\MSBuild.exe',
-      path:
-        'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\WDExpress',
-      sdk: '10.0.17763.0',
-      toolset: 'v141',
-      version: '15.9.28307.858',
-      versionMajor: 15,
-      versionMinor: 9,
-      versionYear: 2017
+  it('minimal VS2017 Build Tools', function () {
+    const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
+      assert.strictEqual(err, null)
+      assert.deepStrictEqual(info, {
+        msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' +
+          'BuildTools\\MSBuild\\15.0\\Bin\\MSBuild.exe',
+        path:
+          'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\BuildTools',
+        sdk: '10.0.17134.0',
+        toolset: 'v141',
+        version: '15.9.28307.665',
+        versionMajor: 15,
+        versionMinor: 9,
+        versionYear: 2017
+      })
     })
-  })
-  poison(finder, 'regSearchKeys')
-  finder.findVisualStudio2017OrNewer = (cb) => {
-    const file = path.join(__dirname, 'fixtures', 'VS_2017_Express.txt')
-    const data = fs.readFileSync(file)
-    finder.parseData(null, data, '', cb)
-  }
-  finder.findVisualStudio()
-})
 
+    poison(finder, 'regSearchKeys')
+    finder.findVisualStudio2017OrNewer = (cb) => {
+      const file = path.join(__dirname, 'fixtures',
+        'VS_2017_BuildTools_minimal.txt')
+      const data = fs.readFileSync(file)
+      finder.parseData(null, data, '', cb)
+    }
+    finder.findVisualStudio()
+  })
 
-test('VS2019 Preview with C++ workload', function (t) {
-  t.plan(2)
-
-  const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
-    t.strictEqual(err, null)
-    t.deepEqual(info, {
-      msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' +
-        'Preview\\MSBuild\\Current\\Bin\\MSBuild.exe',
-      path:
-        'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Preview',
-      sdk: '10.0.17763.0',
-      toolset: 'v142',
-      version: '16.0.28608.199',
-      versionMajor: 16,
-      versionMinor: 0,
-      versionYear: 2019
+  it('VS2017 Community with C++ workload', function () {
+    const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
+      assert.strictEqual(err, null)
+      assert.deepStrictEqual(info, {
+        msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' +
+          'Community\\MSBuild\\15.0\\Bin\\MSBuild.exe',
+        path:
+          'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community',
+        sdk: '10.0.17763.0',
+        toolset: 'v141',
+        version: '15.9.28307.665',
+        versionMajor: 15,
+        versionMinor: 9,
+        versionYear: 2017
+      })
     })
-  })
-  poison(finder, 'regSearchKeys')
-  finder.findVisualStudio2017OrNewer = (cb) => {
-    const file = path.join(__dirname, 'fixtures',
-      'VS_2019_Preview.txt')
-    const data = fs.readFileSync(file)
-    finder.parseData(null, data, '', cb)
-  }
-  finder.findVisualStudio()
-})
 
+    poison(finder, 'regSearchKeys')
+    finder.findVisualStudio2017OrNewer = (cb) => {
+      const file = path.join(__dirname, 'fixtures',
+        'VS_2017_Community_workload.txt')
+      const data = fs.readFileSync(file)
+      finder.parseData(null, data, '', cb)
+    }
+    finder.findVisualStudio()
+  })
 
-test('minimal VS2019 Build Tools', function (t) {
-  t.plan(2)
-
-  const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
-    t.strictEqual(err, null)
-    t.deepEqual(info, {
-      msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' +
-        'BuildTools\\MSBuild\\Current\\Bin\\MSBuild.exe',
-      path:
-        'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools',
-      sdk: '10.0.17134.0',
-      toolset: 'v142',
-      version: '16.1.28922.388',
-      versionMajor: 16,
-      versionMinor: 1,
-      versionYear: 2019
+  it('VS2017 Express', function () {
+    const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
+      assert.strictEqual(err, null)
+      assert.deepStrictEqual(info, {
+        msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\' +
+          'WDExpress\\MSBuild\\15.0\\Bin\\MSBuild.exe',
+        path:
+          'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\WDExpress',
+        sdk: '10.0.17763.0',
+        toolset: 'v141',
+        version: '15.9.28307.858',
+        versionMajor: 15,
+        versionMinor: 9,
+        versionYear: 2017
+      })
     })
-  })
-  poison(finder, 'regSearchKeys')
-  finder.findVisualStudio2017OrNewer = (cb) => {
-    const file = path.join(__dirname, 'fixtures',
-      'VS_2019_BuildTools_minimal.txt')
-    const data = fs.readFileSync(file)
-    finder.parseData(null, data, '', cb)
-  }
-  finder.findVisualStudio()
-})
 
+    poison(finder, 'regSearchKeys')
+    finder.findVisualStudio2017OrNewer = (cb) => {
+      const file = path.join(__dirname, 'fixtures', 'VS_2017_Express.txt')
+      const data = fs.readFileSync(file)
+      finder.parseData(null, data, '', cb)
+    }
+    finder.findVisualStudio()
+  })
 
-test('VS2019 Community with C++ workload', function (t) {
-  t.plan(2)
-
-  const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
-    t.strictEqual(err, null)
-    t.deepEqual(info, {
-      msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' +
-        'Community\\MSBuild\\Current\\Bin\\MSBuild.exe',
-      path:
-        'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community',
-      sdk: '10.0.17763.0',
-      toolset: 'v142',
-      version: '16.1.28922.388',
-      versionMajor: 16,
-      versionMinor: 1,
-      versionYear: 2019
+  it('VS2019 Preview with C++ workload', function () {
+    const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
+      assert.strictEqual(err, null)
+      assert.deepStrictEqual(info, {
+        msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' +
+          'Preview\\MSBuild\\Current\\Bin\\MSBuild.exe',
+        path:
+          'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Preview',
+        sdk: '10.0.17763.0',
+        toolset: 'v142',
+        version: '16.0.28608.199',
+        versionMajor: 16,
+        versionMinor: 0,
+        versionYear: 2019
+      })
     })
+
+    poison(finder, 'regSearchKeys')
+    finder.findVisualStudio2017OrNewer = (cb) => {
+      const file = path.join(__dirname, 'fixtures',
+        'VS_2019_Preview.txt')
+      const data = fs.readFileSync(file)
+      finder.parseData(null, data, '', cb)
+    }
+    finder.findVisualStudio()
   })
-  poison(finder, 'regSearchKeys')
-  finder.findVisualStudio2017OrNewer = (cb) => {
-    const file = path.join(__dirname, 'fixtures',
-      'VS_2019_Community_workload.txt')
-    const data = fs.readFileSync(file)
-    finder.parseData(null, data, '', cb)
-  }
-  finder.findVisualStudio()
-})
 
+  it('minimal VS2019 Build Tools', function () {
+    const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
+      assert.strictEqual(err, null)
+      assert.deepStrictEqual(info, {
+        msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' +
+          'BuildTools\\MSBuild\\Current\\Bin\\MSBuild.exe',
+        path:
+          'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools',
+        sdk: '10.0.17134.0',
+        toolset: 'v142',
+        version: '16.1.28922.388',
+        versionMajor: 16,
+        versionMinor: 1,
+        versionYear: 2019
+      })
+    })
 
-function allVsVersions (t, finder) {
-  finder.findVisualStudio2017OrNewer = (cb) => {
-    const data0 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures',
-      'VS_2017_Unusable.txt')))
-    const data1 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures',
-      'VS_2017_BuildTools_minimal.txt')))
-    const data2 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures',
-      'VS_2017_Community_workload.txt')))
-    const data3 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures',
-      'VS_2017_Express.txt')))
-    const data4 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures',
-      'VS_2019_Preview.txt')))
-    const data5 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures',
-      'VS_2019_BuildTools_minimal.txt')))
-    const data6 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures',
-      'VS_2019_Community_workload.txt')))
-    const data = JSON.stringify(data0.concat(data1, data2, data3, data4,
-      data5, data6))
-    finder.parseData(null, data, '', cb)
-  }
-  finder.regSearchKeys = (keys, value, addOpts, cb) => {
-    for (var i = 0; i < keys.length; ++i) {
-      const fullName = `${keys[i]}\\${value}`
-      switch (fullName) {
-        case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0':
-        case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0':
-          continue
-        case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0':
-          return cb(null, 'C:\\VS2013\\VC\\')
-        case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\12.0\\MSBuildToolsPath':
-          return cb(null, 'C:\\MSBuild12\\')
-        case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0':
-          return cb(null, 'C:\\VS2015\\VC\\')
-        case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\14.0\\MSBuildToolsPath':
-          return cb(null, 'C:\\MSBuild14\\')
-        default:
-          t.fail(`unexpected search for registry value ${fullName}`)
-      }
+    poison(finder, 'regSearchKeys')
+    finder.findVisualStudio2017OrNewer = (cb) => {
+      const file = path.join(__dirname, 'fixtures',
+        'VS_2019_BuildTools_minimal.txt')
+      const data = fs.readFileSync(file)
+      finder.parseData(null, data, '', cb)
     }
-    return cb(new Error())
-  }
-}
+    finder.findVisualStudio()
+  })
 
-test('fail when looking for invalid path', function (t) {
-  t.plan(2)
+  it('VS2019 Community with C++ workload', function () {
+    const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
+      assert.strictEqual(err, null)
+      assert.deepStrictEqual(info, {
+        msBuild: 'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\' +
+          'Community\\MSBuild\\Current\\Bin\\MSBuild.exe',
+        path:
+          'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\Community',
+        sdk: '10.0.17763.0',
+        toolset: 'v142',
+        version: '16.1.28922.388',
+        versionMajor: 16,
+        versionMinor: 1,
+        versionYear: 2019
+      })
+    })
 
-  const finder = new TestVisualStudioFinder(semverV1, 'AABB', (err, info) => {
-    t.ok(/find .* Visual Studio/i.test(err), 'expect error')
-    t.false(info, 'no data')
+    poison(finder, 'regSearchKeys')
+    finder.findVisualStudio2017OrNewer = (cb) => {
+      const file = path.join(__dirname, 'fixtures',
+        'VS_2019_Community_workload.txt')
+      const data = fs.readFileSync(file)
+      finder.parseData(null, data, '', cb)
+    }
+    finder.findVisualStudio()
   })
-  allVsVersions(t, finder)
-  finder.findVisualStudio()
-})
-
-test('look for VS2013 by version number', function (t) {
-  t.plan(2)
 
+  it('VS2022 Preview with C++ workload', function () {
+    const msBuildPath = process.arch === 'arm64'
+      ? 'C:\\Program Files\\Microsoft Visual Studio\\2022\\' +
+        'Community\\MSBuild\\Current\\Bin\\arm64\\MSBuild.exe'
+      : 'C:\\Program Files\\Microsoft Visual Studio\\2022\\' +
+        'Community\\MSBuild\\Current\\Bin\\MSBuild.exe'
+
+    const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
+      assert.strictEqual(err, null)
+      assert.deepStrictEqual(info, {
+        msBuild: msBuildPath,
+        path:
+          'C:\\Program Files\\Microsoft Visual Studio\\2022\\Community',
+        sdk: '10.0.22621.0',
+        toolset: 'v143',
+        version: '17.4.33213.308',
+        versionMajor: 17,
+        versionMinor: 4,
+        versionYear: 2022
+      })
+    })
 
-  const finder = new TestVisualStudioFinder(semverV1, '2013', (err, info) => {
-    t.strictEqual(err, null)
-    t.deepEqual(info.versionYear, 2013)
+    poison(finder, 'regSearchKeys')
+    finder.msBuildPathExists = (path) => {
+      return true
+    }
+    finder.findVisualStudio2017OrNewer = (cb) => {
+      const file = path.join(__dirname, 'fixtures',
+        'VS_2022_Community_workload.txt')
+      const data = fs.readFileSync(file)
+      finder.parseData(null, data, '', cb)
+    }
+    finder.findVisualStudio()
   })
-  allVsVersions(t, finder)
-  finder.findVisualStudio()
-})
-
-test('look for VS2013 by installation path', function (t) {
-  t.plan(2)
 
-  const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2013',
-    (err, info) => {
-      t.strictEqual(err, null)
-      t.deepEqual(info.path, 'C:\\VS2013')
-    })
-  allVsVersions(t, finder)
-  finder.findVisualStudio()
-})
-
-test('look for VS2015 by version number', function (t) {
-  t.plan(2)
-
-  const finder = new TestVisualStudioFinder(semverV1, '2015', (err, info) => {
-    t.strictEqual(err, null)
-    t.deepEqual(info.versionYear, 2015)
-  })
-  allVsVersions(t, finder)
-  finder.findVisualStudio()
-})
+  function allVsVersions (finder) {
+    finder.findVisualStudio2017OrNewer = (cb) => {
+      const data0 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures',
+        'VS_2017_Unusable.txt')))
+      const data1 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures',
+        'VS_2017_BuildTools_minimal.txt')))
+      const data2 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures',
+        'VS_2017_Community_workload.txt')))
+      const data3 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures',
+        'VS_2017_Express.txt')))
+      const data4 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures',
+        'VS_2019_Preview.txt')))
+      const data5 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures',
+        'VS_2019_BuildTools_minimal.txt')))
+      const data6 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures',
+        'VS_2019_Community_workload.txt')))
+      const data7 = JSON.parse(fs.readFileSync(path.join(__dirname, 'fixtures',
+        'VS_2022_Community_workload.txt')))
+      const data = JSON.stringify(data0.concat(data1, data2, data3, data4,
+        data5, data6, data7))
+      finder.parseData(null, data, '', cb)
+    }
+    finder.regSearchKeys = (keys, value, addOpts, cb) => {
+      for (var i = 0; i < keys.length; ++i) {
+        const fullName = `${keys[i]}\\${value}`
+        switch (fullName) {
+          case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0':
+          case 'HKLM\\Software\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0':
+            continue
+          case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\12.0':
+            return cb(null, 'C:\\VS2013\\VC\\')
+          case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\12.0\\MSBuildToolsPath':
+            return cb(null, 'C:\\MSBuild12\\')
+          case 'HKLM\\Software\\Wow6432Node\\Microsoft\\VisualStudio\\SxS\\VC7\\14.0':
+            return cb(null, 'C:\\VS2015\\VC\\')
+          case 'HKLM\\Software\\Microsoft\\MSBuild\\ToolsVersions\\14.0\\MSBuildToolsPath':
+            return cb(null, 'C:\\MSBuild14\\')
+          default:
+            assert.fail(`unexpected search for registry value ${fullName}`)
+        }
+      }
+      return cb(new Error())
+    }
+  }
 
-test('look for VS2015 by installation path', function (t) {
-  t.plan(2)
+  it('fail when looking for invalid path', function () {
+    const finder = new TestVisualStudioFinder(semverV1, 'AABB', (err, info) => {
+      assert.ok(/find .* Visual Studio/i.test(err), 'expect error')
+      assert.ok(!info, 'no data')
+    })
 
-  const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015',
-    (err, info) => {
-      t.strictEqual(err, null)
-      t.deepEqual(info.path, 'C:\\VS2015')
+    allVsVersions(finder)
+    finder.findVisualStudio()
+  })
+
+  it('look for VS2013 by version number', function () {
+    const finder = new TestVisualStudioFinder(semverV1, '2013', (err, info) => {
+      assert.strictEqual(err, null)
+      assert.deepStrictEqual(info.versionYear, 2013)
     })
-  allVsVersions(t, finder)
-  finder.findVisualStudio()
-})
-
-test('look for VS2017 by version number', function (t) {
-  t.plan(2)
-
-  const finder = new TestVisualStudioFinder(semverV1, '2017', (err, info) => {
-    t.strictEqual(err, null)
-    t.deepEqual(info.versionYear, 2017)
+
+    allVsVersions(finder)
+    finder.findVisualStudio()
   })
-  allVsVersions(t, finder)
-  finder.findVisualStudio()
-})
 
-test('look for VS2017 by installation path', function (t) {
-  t.plan(2)
+  it('look for VS2013 by installation path', function () {
+    const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2013',
+      (err, info) => {
+        assert.strictEqual(err, null)
+        assert.deepStrictEqual(info.path, 'C:\\VS2013')
+      })
 
-  const finder = new TestVisualStudioFinder(semverV1,
-    'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community',
-    (err, info) => {
-      t.strictEqual(err, null)
-      t.deepEqual(info.path,
-        'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community')
+    allVsVersions(finder)
+    finder.findVisualStudio()
+  })
+
+  it('look for VS2015 by version number', function () {
+    const finder = new TestVisualStudioFinder(semverV1, '2015', (err, info) => {
+      assert.strictEqual(err, null)
+      assert.deepStrictEqual(info.versionYear, 2015)
     })
-  allVsVersions(t, finder)
-  finder.findVisualStudio()
-})
 
-test('look for VS2019 by version number', function (t) {
-  t.plan(2)
-
-  const finder = new TestVisualStudioFinder(semverV1, '2019', (err, info) => {
-    t.strictEqual(err, null)
-    t.deepEqual(info.versionYear, 2019)
+    allVsVersions(finder)
+    finder.findVisualStudio()
   })
-  allVsVersions(t, finder)
-  finder.findVisualStudio()
-})
-
-test('look for VS2019 by installation path', function (t) {
-  t.plan(2)
 
-  const finder = new TestVisualStudioFinder(semverV1,
-    'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools',
-    (err, info) => {
-      t.strictEqual(err, null)
-      t.deepEqual(info.path,
-        'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools')
+  it('look for VS2015 by installation path', function () {
+    const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015',
+      (err, info) => {
+        assert.strictEqual(err, null)
+        assert.deepStrictEqual(info.path, 'C:\\VS2015')
+      })
+
+    allVsVersions(finder)
+    finder.findVisualStudio()
+  })
+
+  it('look for VS2017 by version number', function () {
+    const finder = new TestVisualStudioFinder(semverV1, '2017', (err, info) => {
+      assert.strictEqual(err, null)
+      assert.deepStrictEqual(info.versionYear, 2017)
     })
-  allVsVersions(t, finder)
-  finder.findVisualStudio()
-})
 
-test('msvs_version match should be case insensitive', function (t) {
-  t.plan(2)
+    allVsVersions(finder)
+    finder.findVisualStudio()
+  })
 
-  const finder = new TestVisualStudioFinder(semverV1,
-    'c:\\program files (x86)\\microsoft visual studio\\2019\\BUILDTOOLS',
-    (err, info) => {
-      t.strictEqual(err, null)
-      t.deepEqual(info.path,
-        'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools')
+  it('look for VS2017 by installation path', function () {
+    const finder = new TestVisualStudioFinder(semverV1,
+      'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community',
+      (err, info) => {
+        assert.strictEqual(err, null)
+        assert.deepStrictEqual(info.path,
+          'C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community')
+      })
+
+    allVsVersions(finder)
+    finder.findVisualStudio()
+  })
+
+  it('look for VS2019 by version number', function () {
+    const finder = new TestVisualStudioFinder(semverV1, '2019', (err, info) => {
+      assert.strictEqual(err, null)
+      assert.deepStrictEqual(info.versionYear, 2019)
     })
-  allVsVersions(t, finder)
-  finder.findVisualStudio()
-})
-
-test('latest version should be found by default', function (t) {
-  t.plan(2)
 
-  const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
-    t.strictEqual(err, null)
-    t.deepEqual(info.versionYear, 2019)
+    allVsVersions(finder)
+    finder.findVisualStudio()
   })
-  allVsVersions(t, finder)
-  finder.findVisualStudio()
-})
 
-test('run on a usable VS Command Prompt', function (t) {
-  t.plan(2)
+  it('look for VS2019 by installation path', function () {
+    const finder = new TestVisualStudioFinder(semverV1,
+      'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools',
+      (err, info) => {
+        assert.strictEqual(err, null)
+        assert.deepStrictEqual(info.path,
+          'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools')
+      })
+
+    allVsVersions(finder)
+    finder.findVisualStudio()
+  })
 
-  process.env.VCINSTALLDIR = 'C:\\VS2015\\VC'
-  // VSINSTALLDIR is not defined on Visual C++ Build Tools 2015
-  delete process.env.VSINSTALLDIR
+  it('look for VS2022 by version number', function () {
+    const finder = new TestVisualStudioFinder(semverV1, '2022', (err, info) => {
+      assert.strictEqual(err, null)
+      assert.deepStrictEqual(info.versionYear, 2022)
+    })
 
-  const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
-    t.strictEqual(err, null)
-    t.deepEqual(info.path, 'C:\\VS2015')
-  })
-  allVsVersions(t, finder)
-  finder.findVisualStudio()
-})
+    finder.msBuildPathExists = (path) => {
+      return true
+    }
 
-test('VCINSTALLDIR match should be case insensitive', function (t) {
-  t.plan(2)
-
-  process.env.VCINSTALLDIR =
-    'c:\\program files (x86)\\microsoft visual studio\\2019\\BUILDTOOLS\\VC'
+    allVsVersions(finder)
+    finder.findVisualStudio()
+  })
 
-  const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
-    t.strictEqual(err, null)
-    t.deepEqual(info.path,
-      'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools')
+  it('msvs_version match should be case insensitive', function () {
+    const finder = new TestVisualStudioFinder(semverV1,
+      'c:\\program files (x86)\\microsoft visual studio\\2019\\BUILDTOOLS',
+      (err, info) => {
+        assert.strictEqual(err, null)
+        assert.deepStrictEqual(info.path,
+          'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools')
+      })
+
+    allVsVersions(finder)
+    finder.findVisualStudio()
+  })
+
+  it('latest version should be found by default', function () {
+    const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
+      assert.strictEqual(err, null)
+      assert.deepStrictEqual(info.versionYear, 2022)
+    })
+
+    finder.msBuildPathExists = (path) => {
+      return true
+    }
+
+    allVsVersions(finder)
+    finder.findVisualStudio()
   })
-  allVsVersions(t, finder)
-  finder.findVisualStudio()
-})
-
-test('run on a unusable VS Command Prompt', function (t) {
-  t.plan(2)
 
-  process.env.VCINSTALLDIR =
-    'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildToolsUnusable\\VC'
+  it('run on a usable VS Command Prompt', function () {
+    process.env.VCINSTALLDIR = 'C:\\VS2015\\VC'
+    // VSINSTALLDIR is not defined on Visual C++ Build Tools 2015
+    delete process.env.VSINSTALLDIR
 
-  const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
-    t.ok(/find .* Visual Studio/i.test(err), 'expect error')
-    t.false(info, 'no data')
+    const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
+      assert.strictEqual(err, null)
+      assert.deepStrictEqual(info.path, 'C:\\VS2015')
+    })
+
+    allVsVersions(finder)
+    finder.findVisualStudio()
   })
-  allVsVersions(t, finder)
-  finder.findVisualStudio()
-})
 
-test('run on a VS Command Prompt with matching msvs_version', function (t) {
-  t.plan(2)
+  it('VCINSTALLDIR match should be case insensitive', function () {
+    process.env.VCINSTALLDIR =
+      'c:\\program files (x86)\\microsoft visual studio\\2019\\BUILDTOOLS\\VC'
 
-  process.env.VCINSTALLDIR = 'C:\\VS2015\\VC'
+    const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
+      assert.strictEqual(err, null)
+      assert.deepStrictEqual(info.path,
+        'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools')
+    })
 
-  const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015',
-    (err, info) => {
-      t.strictEqual(err, null)
-      t.deepEqual(info.path, 'C:\\VS2015')
-    })
+    allVsVersions(finder)
+    finder.findVisualStudio()
+  })
 
-  allVsVersions(t, finder)
-  finder.findVisualStudio()
-})
+  it('run on a unusable VS Command Prompt', function () {
+    process.env.VCINSTALLDIR =
+      'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildToolsUnusable\\VC'
 
-test('run on a VS Command Prompt with mismatched msvs_version', function (t) {
-  t.plan(2)
+    const finder = new TestVisualStudioFinder(semverV1, null, (err, info) => {
+      assert.ok(/find .* Visual Studio/i.test(err), 'expect error')
+      assert.ok(!info, 'no data')
+    })
 
-  process.env.VCINSTALLDIR =
-    'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools\\VC'
+    allVsVersions(finder)
+    finder.findVisualStudio()
+  })
 
-  const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015',
-    (err, info) => {
-      t.ok(/find .* Visual Studio/i.test(err), 'expect error')
-      t.false(info, 'no data')
-    })
+  it('run on a VS Command Prompt with matching msvs_version', function () {
+    process.env.VCINSTALLDIR = 'C:\\VS2015\\VC'
 
-  allVsVersions(t, finder)
-  finder.findVisualStudio()
+    const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015',
+      (err, info) => {
+        assert.strictEqual(err, null)
+        assert.deepStrictEqual(info.path, 'C:\\VS2015')
+      })
+
+    allVsVersions(finder)
+    finder.findVisualStudio()
+  })
+
+  it('run on a VS Command Prompt with mismatched msvs_version', function () {
+    process.env.VCINSTALLDIR =
+      'C:\\Program Files (x86)\\Microsoft Visual Studio\\2019\\BuildTools\\VC'
+
+    const finder = new TestVisualStudioFinder(semverV1, 'C:\\VS2015',
+      (err, info) => {
+        assert.ok(/find .* Visual Studio/i.test(err), 'expect error')
+        assert.ok(!info, 'no data')
+      })
+
+    allVsVersions(finder)
+    finder.findVisualStudio()
+  })
 })
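Every hunk in the file above applies the same mechanical migration: tap's test(t) callbacks with t.plan()/t.ok()/t.deepEqual() become mocha describe/it blocks that assert through Node's core assert module (assert.ok, assert.strictEqual, assert.deepStrictEqual). A minimal sketch of the pattern, using a hypothetical add() function rather than anything from node-gyp:

'use strict'

const { describe, it } = require('mocha')
const assert = require('assert')

// hypothetical function under test, standing in for the finder calls above
function add (a, b) { return a + b }

describe('add', function () {
  it('sums two numbers', function () {
    // no t.plan() needed: mocha fails the test if any assertion throws
    assert.strictEqual(add(2, 3), 5)
    assert.deepStrictEqual([add(1, 1)], [2])
  })
})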
diff --git a/node_modules/npm/node_modules/node-gyp/test/test-install.js b/node_modules/npm/node_modules/node-gyp/test/test-install.js
index 5039dc9..235acf5 100644
--- a/node_modules/npm/node_modules/node-gyp/test/test-install.js
+++ b/node_modules/npm/node_modules/node-gyp/test/test-install.js
@@ -1,46 +1,137 @@
 'use strict'
 
-const { test } = require('tap')
-const { test: { install } } = require('../lib/install')
+const { describe, it, after } = require('mocha')
+const assert = require('assert')
+const path = require('path')
+const os = require('os')
+const util = require('util')
+const { test: { download, install } } = require('../lib/install')
+const rimraf = require('rimraf')
+const gyp = require('../lib/node-gyp')
 const log = require('npmlog')
+const semver = require('semver')
+const stream = require('stream')
+const streamPipeline = util.promisify(stream.pipeline)
 
 log.level = 'error' // we expect a warning
 
-test('EACCES retry once', async (t) => {
-  t.plan(3)
-
-  const fs = {
-    promises: {
-      stat (_) {
-        const err = new Error()
-        err.code = 'EACCES'
-        t.ok(true)
-        throw err
+describe('install', function () {
+  it('EACCES retry once', async () => {
+    const fs = {
+      promises: {
+        stat (_) {
+          const err = new Error()
+          err.code = 'EACCES'
+          assert.ok(true)
+          throw err
+        }
       }
     }
-  }
 
-  const Gyp = {
-    devDir: __dirname,
-    opts: {
-      ensure: true
-    },
-    commands: {
-      install (argv, cb) {
-        install(fs, Gyp, argv).then(cb, cb)
+    const Gyp = {
+      devDir: __dirname,
+      opts: {
+        ensure: true
       },
-      remove (_, cb) {
-        cb()
+      commands: {
+        install (argv, cb) {
+          install(fs, Gyp, argv).then(cb, cb)
+        },
+        remove (_, cb) {
+          cb()
+        }
       }
     }
-  }
 
-  try {
-    await install(fs, Gyp, [])
-  } catch (err) {
-    t.ok(true)
-    if (/"pre" versions of node cannot be installed/.test(err.message)) {
-      t.ok(true)
+    try {
+      await install(fs, Gyp, [])
+    } catch (err) {
+      assert.ok(true)
+      if (/"pre" versions of node cannot be installed/.test(err.message)) {
+        assert.ok(true)
+      }
+    }
+  })
+
+  // only run these tests if we are running a version of Node with predictable version path behavior
+  const skipParallelInstallTests = process.env.FAST_TEST ||
+    process.release.name !== 'node' ||
+    semver.prerelease(process.version) !== null ||
+    semver.satisfies(process.version, '<10')
+
+  async function parallelInstallsTest (test, fs, devDir, prog) {
+    if (skipParallelInstallTests) {
+      return test.skip('Skipping parallel installs test due to test environment configuration')
     }
+
+    after(async () => {
+      await util.promisify(rimraf)(devDir)
+    })
+
+    const expectedDir = path.join(devDir, process.version.replace(/^v/, ''))
+    await util.promisify(rimraf)(expectedDir)
+
+    await Promise.all([
+      install(fs, prog, []),
+      install(fs, prog, []),
+      install(fs, prog, []),
+      install(fs, prog, []),
+      install(fs, prog, []),
+      install(fs, prog, []),
+      install(fs, prog, []),
+      install(fs, prog, []),
+      install(fs, prog, []),
+      install(fs, prog, [])
+    ])
   }
+
+  it('parallel installs (ensure=true)', async function () {
+    this.timeout(600000)
+
+    const fs = require('graceful-fs')
+    const devDir = await util.promisify(fs.mkdtemp)(path.join(os.tmpdir(), 'node-gyp-test-'))
+
+    const prog = gyp()
+    prog.parseArgv([])
+    prog.devDir = devDir
+    prog.opts.ensure = true
+    log.level = 'warn'
+
+    await parallelInstallsTest(this, fs, devDir, prog)
+  })
+
+  it('parallel installs (ensure=false)', async function () {
+    this.timeout(600000)
+
+    const fs = require('graceful-fs')
+    const devDir = await util.promisify(fs.mkdtemp)(path.join(os.tmpdir(), 'node-gyp-test-'))
+
+    const prog = gyp()
+    prog.parseArgv([])
+    prog.devDir = devDir
+    prog.opts.ensure = false
+    log.level = 'warn'
+
+    await parallelInstallsTest(this, fs, devDir, prog)
+  })
+
+  it('parallel installs (tarball)', async function () {
+    this.timeout(600000)
+
+    const fs = require('graceful-fs')
+    const devDir = await util.promisify(fs.mkdtemp)(path.join(os.tmpdir(), 'node-gyp-test-'))
+
+    const prog = gyp()
+    prog.parseArgv([])
+    prog.devDir = devDir
+    prog.opts.tarball = path.join(devDir, 'node-headers.tar.gz')
+    log.level = 'warn'
+
+    await streamPipeline(
+      (await download(prog, `https://nodejs.org/dist/${process.version}/node-${process.version}.tar.gz`)).body,
+      fs.createWriteStream(prog.opts.tarball)
+    )
+
+    await parallelInstallsTest(this, fs, devDir, prog)
+  })
 })
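The new parallel-install cases above all drive one helper: ten concurrent install() calls race against a single temp devDir, and the suite only passes if the installs tolerate that concurrency; rimraf then removes the directory in an after hook. A stripped-down sketch of the same concurrency pattern, with a hypothetical idempotent ensureDir() standing in for node-gyp's install():

'use strict'

const assert = require('assert')
const fs = require('fs')
const os = require('os')
const path = require('path')

// hypothetical stand-in for install(): an idempotent step that must
// tolerate being started many times against the same directory
async function ensureDir (dir) {
  await fs.promises.mkdir(dir, { recursive: true })
  return dir
}

async function main () {
  const base = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'parallel-test-'))
  const target = path.join(base, 'devdir')
  // ten concurrent calls, mirroring the Promise.all() block in the hunk above
  const results = await Promise.all(Array.from({ length: 10 }, () => ensureDir(target)))
  results.forEach((dir) => assert.strictEqual(dir, target))
  await fs.promises.rm(base, { recursive: true, force: true }) // cleanup, as rimraf does above
}

main().catch((err) => { console.error(err); process.exit(1) })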
diff --git a/node_modules/npm/node_modules/node-gyp/test/test-options.js b/node_modules/npm/node_modules/node-gyp/test/test-options.js
index 8a634f0..24e79c8 100644
--- a/node_modules/npm/node_modules/node-gyp/test/test-options.js
+++ b/node_modules/npm/node_modules/node-gyp/test/test-options.js
@@ -1,42 +1,41 @@
 'use strict'
 
-const test = require('tap').test
+const { describe, it } = require('mocha')
+const assert = require('assert')
 const gyp = require('../lib/node-gyp')
 
-test('options in environment', (t) => {
-  t.plan(1)
+describe('options', function () {
+  it('options in environment', () => {
+    // `npm test` dumps a ton of npm_config_* variables in the environment.
+    Object.keys(process.env)
+      .filter((key) => /^npm_config_/.test(key))
+      .forEach((key) => { delete process.env[key] })
 
-  // `npm test` dumps a ton of npm_config_* variables in the environment.
-  Object.keys(process.env)
-    .filter((key) => /^npm_config_/.test(key))
-    .forEach((key) => { delete process.env[key] })
+    // in some platforms, certain keys are stubborn and cannot be removed
+    const keys = Object.keys(process.env)
+      .filter((key) => /^npm_config_/.test(key))
+      .map((key) => key.substring('npm_config_'.length))
+      .concat('argv', 'x')
 
-  // in some platforms, certain keys are stubborn and cannot be removed
-  const keys = Object.keys(process.env)
-    .filter((key) => /^npm_config_/.test(key))
-    .map((key) => key.substring('npm_config_'.length))
-    .concat('argv', 'x')
+    // Zero-length keys should get filtered out.
+    process.env.npm_config_ = '42'
+    // Other keys should get added.
+    process.env.npm_config_x = '42'
+    // Except loglevel.
+    process.env.npm_config_loglevel = 'debug'
 
-  // Zero-length keys should get filtered out.
-  process.env.npm_config_ = '42'
-  // Other keys should get added.
-  process.env.npm_config_x = '42'
-  // Except loglevel.
-  process.env.npm_config_loglevel = 'debug'
+    const g = gyp()
+    g.parseArgv(['rebuild']) // Also sets opts.argv.
 
-  const g = gyp()
-  g.parseArgv(['rebuild']) // Also sets opts.argv.
+    assert.deepStrictEqual(Object.keys(g.opts).sort(), keys.sort())
+  })
 
-  t.deepEqual(Object.keys(g.opts).sort(), keys.sort())
-})
-
-test('options with spaces in environment', (t) => {
-  t.plan(1)
-
-  process.env.npm_config_force_process_config = 'true'
+  it('options with spaces in environment', () => {
+    process.env.npm_config_force_process_config = 'true'
 
-  const g = gyp()
-  g.parseArgv(['rebuild']) // Also sets opts.argv.
+    const g = gyp()
+    g.parseArgv(['rebuild']) // Also sets opts.argv.
 
-  t.equal(g.opts['force-process-config'], 'true')
+    assert.strictEqual(g.opts['force-process-config'], 'true')
+  })
 })
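The assertions above pin down how gyp().parseArgv() turns npm_config_* environment variables into opts: the prefix is stripped, zero-length names are dropped, and loglevel is diverted away from opts. A sketch of that filtering as the tests describe it (inferred from the assertions, not copied from lib/node-gyp.js):

'use strict'

// inferred from the test expectations above: strip the npm_config_
// prefix, drop empty names, and route loglevel away from opts
function envToOpts (env) {
  const opts = {}
  for (const [key, value] of Object.entries(env)) {
    if (!key.startsWith('npm_config_')) continue
    const name = key.substring('npm_config_'.length)
    if (name === '' || name === 'loglevel') continue
    opts[name] = value
  }
  return opts
}

console.log(envToOpts({ npm_config_: '42', npm_config_x: '42', npm_config_loglevel: 'debug' }))
// => { x: '42' }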
diff --git a/node_modules/npm/node_modules/node-gyp/test/test-process-release.js b/node_modules/npm/node_modules/node-gyp/test/test-process-release.js
index c3ee070..0f40666 100644
--- a/node_modules/npm/node_modules/node-gyp/test/test-process-release.js
+++ b/node_modules/npm/node_modules/node-gyp/test/test-process-release.js
@@ -1,434 +1,401 @@
 'use strict'
 
-const test = require('tap').test
+const { describe, it } = require('mocha')
+const assert = require('assert')
 const processRelease = require('../lib/process-release')
 
-test('test process release - process.version = 0.8.20', function (t) {
-  t.plan(2)
-
-  var release = processRelease([], { opts: {} }, 'v0.8.20', null)
-
-  t.equal(release.semver.version, '0.8.20')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '0.8.20',
-    name: 'node',
-    baseUrl: 'https://nodejs.org/dist/v0.8.20/',
-    tarballUrl: 'https://nodejs.org/dist/v0.8.20/node-v0.8.20.tar.gz',
-    shasumsUrl: 'https://nodejs.org/dist/v0.8.20/SHASUMS256.txt',
-    versionDir: '0.8.20',
-    ia32: { libUrl: 'https://nodejs.org/dist/v0.8.20/node.lib', libPath: 'node.lib' },
-    x64: { libUrl: 'https://nodejs.org/dist/v0.8.20/x64/node.lib', libPath: 'x64/node.lib' },
-    arm64: { libUrl: 'https://nodejs.org/dist/v0.8.20/arm64/node.lib', libPath: 'arm64/node.lib' }
+describe('process-release', function () {
+  it('test process release - process.version = 0.8.20', function () {
+    var release = processRelease([], { opts: {} }, 'v0.8.20', null)
+
+    assert.strictEqual(release.semver.version, '0.8.20')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '0.8.20',
+      name: 'node',
+      baseUrl: 'https://nodejs.org/dist/v0.8.20/',
+      tarballUrl: 'https://nodejs.org/dist/v0.8.20/node-v0.8.20.tar.gz',
+      shasumsUrl: 'https://nodejs.org/dist/v0.8.20/SHASUMS256.txt',
+      versionDir: '0.8.20',
+      ia32: { libUrl: 'https://nodejs.org/dist/v0.8.20/node.lib', libPath: 'node.lib' },
+      x64: { libUrl: 'https://nodejs.org/dist/v0.8.20/x64/node.lib', libPath: 'x64/node.lib' },
+      arm64: { libUrl: 'https://nodejs.org/dist/v0.8.20/arm64/node.lib', libPath: 'arm64/node.lib' }
+    })
   })
-})
 
-test('test process release - process.version = 0.10.21', function (t) {
-  t.plan(2)
-
-  var release = processRelease([], { opts: {} }, 'v0.10.21', null)
-
-  t.equal(release.semver.version, '0.10.21')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '0.10.21',
-    name: 'node',
-    baseUrl: 'https://nodejs.org/dist/v0.10.21/',
-    tarballUrl: 'https://nodejs.org/dist/v0.10.21/node-v0.10.21.tar.gz',
-    shasumsUrl: 'https://nodejs.org/dist/v0.10.21/SHASUMS256.txt',
-    versionDir: '0.10.21',
-    ia32: { libUrl: 'https://nodejs.org/dist/v0.10.21/node.lib', libPath: 'node.lib' },
-    x64: { libUrl: 'https://nodejs.org/dist/v0.10.21/x64/node.lib', libPath: 'x64/node.lib' },
-    arm64: { libUrl: 'https://nodejs.org/dist/v0.10.21/arm64/node.lib', libPath: 'arm64/node.lib' }
+  it('test process release - process.version = 0.10.21', function () {
+    var release = processRelease([], { opts: {} }, 'v0.10.21', null)
+
+    assert.strictEqual(release.semver.version, '0.10.21')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '0.10.21',
+      name: 'node',
+      baseUrl: 'https://nodejs.org/dist/v0.10.21/',
+      tarballUrl: 'https://nodejs.org/dist/v0.10.21/node-v0.10.21.tar.gz',
+      shasumsUrl: 'https://nodejs.org/dist/v0.10.21/SHASUMS256.txt',
+      versionDir: '0.10.21',
+      ia32: { libUrl: 'https://nodejs.org/dist/v0.10.21/node.lib', libPath: 'node.lib' },
+      x64: { libUrl: 'https://nodejs.org/dist/v0.10.21/x64/node.lib', libPath: 'x64/node.lib' },
+      arm64: { libUrl: 'https://nodejs.org/dist/v0.10.21/arm64/node.lib', libPath: 'arm64/node.lib' }
+    })
  })
-})
 
-// prior to -headers.tar.gz
-test('test process release - process.version = 0.12.9', function (t) {
-  t.plan(2)
-
-  var release = processRelease([], { opts: {} }, 'v0.12.9', null)
-
-  t.equal(release.semver.version, '0.12.9')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '0.12.9',
-    name: 'node',
-    baseUrl: 'https://nodejs.org/dist/v0.12.9/',
-    tarballUrl: 'https://nodejs.org/dist/v0.12.9/node-v0.12.9.tar.gz',
-    shasumsUrl: 'https://nodejs.org/dist/v0.12.9/SHASUMS256.txt',
-    versionDir: '0.12.9',
-    ia32: { libUrl: 'https://nodejs.org/dist/v0.12.9/node.lib', libPath: 'node.lib' },
-    x64: { libUrl: 'https://nodejs.org/dist/v0.12.9/x64/node.lib', libPath: 'x64/node.lib' },
-    arm64: { libUrl: 'https://nodejs.org/dist/v0.12.9/arm64/node.lib', libPath: 'arm64/node.lib' }
+  // prior to -headers.tar.gz
+  it('test process release - process.version = 0.12.9', function () {
+    var release = processRelease([], { opts: {} }, 'v0.12.9', null)
+
+    assert.strictEqual(release.semver.version, '0.12.9')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '0.12.9',
+      name: 'node',
+      baseUrl: 'https://nodejs.org/dist/v0.12.9/',
+      tarballUrl: 'https://nodejs.org/dist/v0.12.9/node-v0.12.9.tar.gz',
+      shasumsUrl: 'https://nodejs.org/dist/v0.12.9/SHASUMS256.txt',
+      versionDir: '0.12.9',
+      ia32: { libUrl: 'https://nodejs.org/dist/v0.12.9/node.lib', libPath: 'node.lib' },
+      x64: { libUrl: 'https://nodejs.org/dist/v0.12.9/x64/node.lib', libPath: 'x64/node.lib' },
+      arm64: { libUrl: 'https://nodejs.org/dist/v0.12.9/arm64/node.lib', libPath: 'arm64/node.lib' }
+    })
  })
-})
 
-// prior to -headers.tar.gz
-test('test process release - process.version = 0.10.41', function (t) {
-  t.plan(2)
-
-  var release = processRelease([], { opts: {} }, 'v0.10.41', null)
-
-  t.equal(release.semver.version, '0.10.41')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '0.10.41',
-    name: 'node',
-    baseUrl: 'https://nodejs.org/dist/v0.10.41/',
-    tarballUrl: 'https://nodejs.org/dist/v0.10.41/node-v0.10.41.tar.gz',
-    shasumsUrl: 'https://nodejs.org/dist/v0.10.41/SHASUMS256.txt',
-    versionDir: '0.10.41',
-    ia32: { libUrl: 'https://nodejs.org/dist/v0.10.41/node.lib', libPath: 'node.lib' },
-    x64: { libUrl: 'https://nodejs.org/dist/v0.10.41/x64/node.lib', libPath: 'x64/node.lib' },
-    arm64: { libUrl: 'https://nodejs.org/dist/v0.10.41/arm64/node.lib', libPath: 'arm64/node.lib' }
+  // prior to -headers.tar.gz
+  it('test process release - process.version = 0.10.41', function () {
+    var release = processRelease([], { opts: {} }, 'v0.10.41', null)
+
+    assert.strictEqual(release.semver.version, '0.10.41')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '0.10.41',
+      name: 'node',
+      baseUrl: 'https://nodejs.org/dist/v0.10.41/',
+      tarballUrl: 'https://nodejs.org/dist/v0.10.41/node-v0.10.41.tar.gz',
+      shasumsUrl: 'https://nodejs.org/dist/v0.10.41/SHASUMS256.txt',
+      versionDir: '0.10.41',
+      ia32: { libUrl: 'https://nodejs.org/dist/v0.10.41/node.lib', libPath: 'node.lib' },
+      x64: { libUrl: 'https://nodejs.org/dist/v0.10.41/x64/node.lib', libPath: 'x64/node.lib' },
+      arm64: { libUrl: 'https://nodejs.org/dist/v0.10.41/arm64/node.lib', libPath: 'arm64/node.lib' }
+    })
  })
-})
 
-// has -headers.tar.gz
-test('test process release - process.release ~ node@0.10.42', function (t) {
-  t.plan(2)
-
-  var release = processRelease([], { opts: {} }, 'v0.10.42', null)
-
-  t.equal(release.semver.version, '0.10.42')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '0.10.42',
-    name: 'node',
-    baseUrl: 'https://nodejs.org/dist/v0.10.42/',
-    tarballUrl: 'https://nodejs.org/dist/v0.10.42/node-v0.10.42-headers.tar.gz',
-    shasumsUrl: 'https://nodejs.org/dist/v0.10.42/SHASUMS256.txt',
-    versionDir: '0.10.42',
-    ia32: { libUrl: 'https://nodejs.org/dist/v0.10.42/node.lib', libPath: 'node.lib' },
-    x64: { libUrl: 'https://nodejs.org/dist/v0.10.42/x64/node.lib', libPath: 'x64/node.lib' },
-    arm64: { libUrl: 'https://nodejs.org/dist/v0.10.42/arm64/node.lib', libPath: 'arm64/node.lib' }
+  // has -headers.tar.gz
+  it('test process release - process.release ~ node@0.10.42', function () {
+    var release = processRelease([], { opts: {} }, 'v0.10.42', null)
+
+    assert.strictEqual(release.semver.version, '0.10.42')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '0.10.42',
+      name: 'node',
+      baseUrl: 'https://nodejs.org/dist/v0.10.42/',
+      tarballUrl: 'https://nodejs.org/dist/v0.10.42/node-v0.10.42-headers.tar.gz',
+      shasumsUrl: 'https://nodejs.org/dist/v0.10.42/SHASUMS256.txt',
+      versionDir: '0.10.42',
+      ia32: { libUrl: 'https://nodejs.org/dist/v0.10.42/node.lib', libPath: 'node.lib' },
+      x64: { libUrl: 'https://nodejs.org/dist/v0.10.42/x64/node.lib', libPath: 'x64/node.lib' },
+      arm64: { libUrl: 'https://nodejs.org/dist/v0.10.42/arm64/node.lib', libPath: 'arm64/node.lib' }
+    })
  })
-})
 
-// has -headers.tar.gz
-test('test process release - process.release ~ node@0.12.10', function (t) {
-  t.plan(2)
-
-  var release = processRelease([], { opts: {} }, 'v0.12.10', null)
-
-  t.equal(release.semver.version, '0.12.10')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '0.12.10',
-    name: 'node',
-    baseUrl: 'https://nodejs.org/dist/v0.12.10/',
-    tarballUrl: 'https://nodejs.org/dist/v0.12.10/node-v0.12.10-headers.tar.gz',
-    shasumsUrl: 'https://nodejs.org/dist/v0.12.10/SHASUMS256.txt',
-    versionDir: '0.12.10',
-    ia32: { libUrl: 'https://nodejs.org/dist/v0.12.10/node.lib', libPath: 'node.lib' },
-    x64: { libUrl: 'https://nodejs.org/dist/v0.12.10/x64/node.lib', libPath: 'x64/node.lib' },
-    arm64: { libUrl: 'https://nodejs.org/dist/v0.12.10/arm64/node.lib', libPath: 'arm64/node.lib' }
+  // has -headers.tar.gz
+  it('test process release - process.release ~ node@0.12.10', function () {
+    var release = processRelease([], { opts: {} }, 'v0.12.10', null)
+
+    assert.strictEqual(release.semver.version, '0.12.10')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '0.12.10',
+      name: 'node',
+      baseUrl: 'https://nodejs.org/dist/v0.12.10/',
+      tarballUrl: 'https://nodejs.org/dist/v0.12.10/node-v0.12.10-headers.tar.gz',
+      shasumsUrl: 'https://nodejs.org/dist/v0.12.10/SHASUMS256.txt',
+      versionDir: '0.12.10',
+      ia32: { libUrl: 'https://nodejs.org/dist/v0.12.10/node.lib', libPath: 'node.lib' },
+      x64: { libUrl: 'https://nodejs.org/dist/v0.12.10/x64/node.lib', libPath: 'x64/node.lib' },
+      arm64: { libUrl: 'https://nodejs.org/dist/v0.12.10/arm64/node.lib', libPath: 'arm64/node.lib' }
+    })
  })
-})
 
-test('test process release - process.release ~ node@4.1.23', function (t) {
-  t.plan(2)
-
-  var release = processRelease([], { opts: {} }, 'v4.1.23', {
-    name: 'node',
-    headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz'
+  it('test process release - process.release ~ node@4.1.23', function () {
+    var release = processRelease([], { opts: {} }, 'v4.1.23', {
+      name: 'node',
+      headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz'
+    })
+
+    assert.strictEqual(release.semver.version, '4.1.23')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '4.1.23',
+      name: 'node',
+      baseUrl: 'https://nodejs.org/dist/v4.1.23/',
+      tarballUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz',
+      shasumsUrl: 'https://nodejs.org/dist/v4.1.23/SHASUMS256.txt',
+      versionDir: '4.1.23',
+      ia32: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' },
+      x64: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' },
+      arm64: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' }
+    })
  })
 
-  t.equal(release.semver.version, '4.1.23')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '4.1.23',
-    name: 'node',
-    baseUrl: 'https://nodejs.org/dist/v4.1.23/',
-    tarballUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz',
-    shasumsUrl: 'https://nodejs.org/dist/v4.1.23/SHASUMS256.txt',
-    versionDir: '4.1.23',
-    ia32: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' },
-    x64: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' },
-    arm64: { libUrl: 'https://nodejs.org/dist/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' }
+  it('test process release - process.release ~ node@4.1.23 / corp build', function () {
+    var release = processRelease([], { opts: {} }, 'v4.1.23', {
+      name: 'node',
+      headersUrl: 'https://some.custom.location/node-v4.1.23-headers.tar.gz'
+    })
+
+    assert.strictEqual(release.semver.version, '4.1.23')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '4.1.23',
+      name: 'node',
+      baseUrl: 'https://some.custom.location/',
+      tarballUrl: 'https://some.custom.location/node-v4.1.23-headers.tar.gz',
+      shasumsUrl: 'https://some.custom.location/SHASUMS256.txt',
+      versionDir: '4.1.23',
+      ia32: { libUrl: 'https://some.custom.location/win-x86/node.lib', libPath: 'win-x86/node.lib' },
+      x64: { libUrl: 'https://some.custom.location/win-x64/node.lib', libPath: 'win-x64/node.lib' },
+      arm64: { libUrl: 'https://some.custom.location/win-arm64/node.lib', libPath: 'win-arm64/node.lib' }
+    })
  })
-})
 
-test('test process release - process.release ~ node@4.1.23 / corp build', function (t) {
-  t.plan(2)
-
-  var release = processRelease([], { opts: {} }, 'v4.1.23', {
-    name: 'node',
-    headersUrl: 'https://some.custom.location/node-v4.1.23-headers.tar.gz'
+  it('test process release - process.release ~ node@12.8.0 Windows', function () {
+    var release = processRelease([], { opts: {} }, 'v12.8.0', {
+      name: 'node',
+      sourceUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0.tar.gz',
+      headersUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz',
+      libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x64/node.lib'
+    })
+
+    assert.strictEqual(release.semver.version, '12.8.0')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '12.8.0',
+      name: 'node',
+      baseUrl: 'https://nodejs.org/download/release/v12.8.0/',
+      tarballUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz',
+      shasumsUrl: 'https://nodejs.org/download/release/v12.8.0/SHASUMS256.txt',
+      versionDir: '12.8.0',
+      ia32: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x86/node.lib', libPath: 'win-x86/node.lib' },
+      x64: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x64/node.lib', libPath: 'win-x64/node.lib' },
+      arm64: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-arm64/node.lib', libPath: 'win-arm64/node.lib' }
+    })
  })
 
-  t.equal(release.semver.version, '4.1.23')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '4.1.23',
-    name: 'node',
-    baseUrl: 'https://some.custom.location/',
-    tarballUrl: 'https://some.custom.location/node-v4.1.23-headers.tar.gz',
-    shasumsUrl: 'https://some.custom.location/SHASUMS256.txt',
-    versionDir: '4.1.23',
-    ia32: { libUrl: 'https://some.custom.location/win-x86/node.lib', libPath: 'win-x86/node.lib' },
-    x64: { libUrl: 'https://some.custom.location/win-x64/node.lib', libPath: 'win-x64/node.lib' },
-    arm64: { libUrl: 'https://some.custom.location/win-arm64/node.lib', libPath: 'win-arm64/node.lib' }
+  it('test process release - process.release ~ node@12.8.0 Windows ARM64', function () {
+    var release = processRelease([], { opts: {} }, 'v12.8.0', {
+      name: 'node',
+      sourceUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0.tar.gz',
+      headersUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz',
+      libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-arm64/node.lib'
+    })
+
+    assert.strictEqual(release.semver.version, '12.8.0')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '12.8.0',
+      name: 'node',
+      baseUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/',
+      tarballUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz',
+      shasumsUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/SHASUMS256.txt',
+      versionDir: '12.8.0',
+      ia32: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-x86/node.lib', libPath: 'win-x86/node.lib' },
+      x64: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-x64/node.lib', libPath: 'win-x64/node.lib' },
+      arm64: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-arm64/node.lib', libPath: 'win-arm64/node.lib' }
+    })
  })
-})
 
-test('test process release - process.release ~ node@12.8.0 Windows', function (t) {
-  t.plan(2)
-
-  var release = processRelease([], { opts: {} }, 'v12.8.0', {
-    name: 'node',
-    sourceUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0.tar.gz',
-    headersUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz',
-    libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x64/node.lib'
-  })
-
-  t.equal(release.semver.version, '12.8.0')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '12.8.0',
-    name: 'node',
-    baseUrl: 'https://nodejs.org/download/release/v12.8.0/',
-    tarballUrl: 'https://nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz',
-    shasumsUrl: 'https://nodejs.org/download/release/v12.8.0/SHASUMS256.txt',
-    versionDir: '12.8.0',
-    ia32: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x86/node.lib', libPath: 'win-x86/node.lib' },
-    x64: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-x64/node.lib', libPath: 'win-x64/node.lib' },
-    arm64: { libUrl: 'https://nodejs.org/download/release/v12.8.0/win-arm64/node.lib', libPath: 'win-arm64/node.lib' }
+  it('test process release - process.release ~ node@4.1.23 --target=0.10.40', function () {
+    var release = processRelease([], { opts: { target: '0.10.40' } }, 'v4.1.23', {
+      name: 'node',
+      headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz'
+    })
+
+    assert.strictEqual(release.semver.version, '0.10.40')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '0.10.40',
+      name: 'node',
+      baseUrl: 'https://nodejs.org/dist/v0.10.40/',
+      tarballUrl: 'https://nodejs.org/dist/v0.10.40/node-v0.10.40.tar.gz',
+      shasumsUrl: 'https://nodejs.org/dist/v0.10.40/SHASUMS256.txt',
+      versionDir: '0.10.40',
+      ia32: { libUrl: 'https://nodejs.org/dist/v0.10.40/node.lib', libPath: 'node.lib' },
+      x64: { libUrl: 'https://nodejs.org/dist/v0.10.40/x64/node.lib', libPath: 'x64/node.lib' },
+      arm64: { libUrl: 'https://nodejs.org/dist/v0.10.40/arm64/node.lib', libPath: 'arm64/node.lib' }
+    })
  })
-})
 
-test('test process release - process.release ~ node@12.8.0 Windows ARM64', function (t) {
-  t.plan(2)
-
-  var release = processRelease([], { opts: {} }, 'v12.8.0', {
-    name: 'node',
-    sourceUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0.tar.gz',
-    headersUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz',
-    libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-arm64/node.lib'
+  it('test process release - process.release ~ node@4.1.23 --dist-url=https://foo.bar/baz', function () {
+    var release = processRelease([], { opts: { 'dist-url': 'https://foo.bar/baz' } }, 'v4.1.23', {
+      name: 'node',
+      headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz'
+    })
+
+    assert.strictEqual(release.semver.version, '4.1.23')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '4.1.23',
+      name: 'node',
+      baseUrl: 'https://foo.bar/baz/v4.1.23/',
+      tarballUrl: 'https://foo.bar/baz/v4.1.23/node-v4.1.23-headers.tar.gz',
+      shasumsUrl: 'https://foo.bar/baz/v4.1.23/SHASUMS256.txt',
+      versionDir: '4.1.23',
+      ia32: { libUrl: 'https://foo.bar/baz/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' },
+      x64: { libUrl: 'https://foo.bar/baz/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' },
+      arm64: { libUrl: 'https://foo.bar/baz/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' }
+    })
  })
 
-  t.equal(release.semver.version, '12.8.0')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '12.8.0',
-    name: 'node',
-    baseUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/',
-    tarballUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/node-v12.8.0-headers.tar.gz',
-    shasumsUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/SHASUMS256.txt',
-    versionDir: '12.8.0',
-    ia32: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-x86/node.lib', libPath: 'win-x86/node.lib' },
-    x64: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-x64/node.lib', libPath: 'win-x64/node.lib' },
-    arm64: { libUrl: 'https://unofficial-builds.nodejs.org/download/release/v12.8.0/win-arm64/node.lib', libPath: 'win-arm64/node.lib' }
+  it('test process release - process.release ~ frankenstein@4.1.23', function () {
+    var release = processRelease([], { opts: {} }, 'v4.1.23', {
+      name: 'frankenstein',
+      headersUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23-headers.tar.gz'
+    })
+
+    assert.strictEqual(release.semver.version, '4.1.23')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '4.1.23',
+      name: 'frankenstein',
+      baseUrl: 'https://frankensteinjs.org/dist/v4.1.23/',
+      tarballUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23-headers.tar.gz',
+      shasumsUrl: 'https://frankensteinjs.org/dist/v4.1.23/SHASUMS256.txt',
+      versionDir: 'frankenstein-4.1.23',
+      ia32: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-x86/frankenstein.lib', libPath: 'win-x86/frankenstein.lib' },
+      x64: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-x64/frankenstein.lib', libPath: 'win-x64/frankenstein.lib' },
+      arm64: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-arm64/frankenstein.lib', libPath: 'win-arm64/frankenstein.lib' }
+    })
  })
-})
 
-test('test process release - process.release ~ node@4.1.23 --target=0.10.40', function (t) {
-  t.plan(2)
-
-  var release = processRelease([], { opts: { target: '0.10.40' } }, 'v4.1.23', {
-    name: 'node',
-    headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz'
+  it('test process release - process.release ~ frankenstein@4.1.23 --dist-url=http://foo.bar/baz/', function () {
+    var release = processRelease([], { opts: { 'dist-url': 'http://foo.bar/baz/' } }, 'v4.1.23', {
+      name: 'frankenstein',
+      headersUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23.tar.gz'
+    })
+
+    assert.strictEqual(release.semver.version, '4.1.23')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '4.1.23',
+      name: 'frankenstein',
+      baseUrl: 'http://foo.bar/baz/v4.1.23/',
+      tarballUrl: 'http://foo.bar/baz/v4.1.23/frankenstein-v4.1.23-headers.tar.gz',
+      shasumsUrl: 'http://foo.bar/baz/v4.1.23/SHASUMS256.txt',
+      versionDir: 'frankenstein-4.1.23',
+      ia32: { libUrl: 'http://foo.bar/baz/v4.1.23/win-x86/frankenstein.lib', libPath: 'win-x86/frankenstein.lib' },
+      x64: { libUrl: 'http://foo.bar/baz/v4.1.23/win-x64/frankenstein.lib', libPath: 'win-x64/frankenstein.lib' },
+      arm64: { libUrl: 'http://foo.bar/baz/v4.1.23/win-arm64/frankenstein.lib', libPath: 'win-arm64/frankenstein.lib' }
+    })
  })
 
-  t.equal(release.semver.version, '0.10.40')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '0.10.40',
-    name: 'node',
-    baseUrl: 'https://nodejs.org/dist/v0.10.40/',
-    tarballUrl: 'https://nodejs.org/dist/v0.10.40/node-v0.10.40.tar.gz',
-    shasumsUrl: 'https://nodejs.org/dist/v0.10.40/SHASUMS256.txt',
-    versionDir: '0.10.40',
-    ia32: { libUrl: 'https://nodejs.org/dist/v0.10.40/node.lib', libPath: 'node.lib' },
-    x64: { libUrl: 'https://nodejs.org/dist/v0.10.40/x64/node.lib', libPath: 'x64/node.lib' },
-    arm64: { libUrl: 'https://nodejs.org/dist/v0.10.40/arm64/node.lib', libPath: 'arm64/node.lib' }
+  it('test process release - process.release ~ node@4.0.0-rc.4', function () {
+    var release = processRelease([], { opts: {} }, 'v4.0.0-rc.4', {
+      name: 'node',
+      headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz'
+    })
+
+    assert.strictEqual(release.semver.version, '4.0.0-rc.4')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '4.0.0-rc.4',
+      name: 'node',
+      baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/',
+      tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz',
+      shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt',
+      versionDir: '4.0.0-rc.4',
+      ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' },
+      x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' },
+      arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' }
+    })
  })
-})
-
-test('test process release - process.release ~ node@4.1.23 --dist-url=https://foo.bar/baz', function (t) {
-  t.plan(2)
-
-  var release = processRelease([], { opts: { 'dist-url': 'https://foo.bar/baz' } }, 'v4.1.23', {
-    name: 'node',
-    headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz'
-  })
-
-  t.equal(release.semver.version, '4.1.23')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '4.1.23',
-    name: 'node',
-    baseUrl: 'https://foo.bar/baz/v4.1.23/',
-    tarballUrl: 'https://foo.bar/baz/v4.1.23/node-v4.1.23-headers.tar.gz',
-    shasumsUrl: 'https://foo.bar/baz/v4.1.23/SHASUMS256.txt',
-    versionDir: '4.1.23',
-    ia32: { libUrl: 'https://foo.bar/baz/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' },
-    x64: { libUrl: 'https://foo.bar/baz/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' },
-    arm64: { libUrl: 'https://foo.bar/baz/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' }
-  })
-})
-
-test('test process release - process.release ~ frankenstein@4.1.23', function (t) {
-  t.plan(2)
-
-  var release = processRelease([], { opts: {} }, 'v4.1.23', {
-    name: 'frankenstein',
-    headersUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23-headers.tar.gz'
-  })
-
-  t.equal(release.semver.version, '4.1.23')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '4.1.23',
-    name: 'frankenstein',
-    baseUrl: 'https://frankensteinjs.org/dist/v4.1.23/',
-    tarballUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23-headers.tar.gz',
-    shasumsUrl: 'https://frankensteinjs.org/dist/v4.1.23/SHASUMS256.txt',
-    versionDir: 'frankenstein-4.1.23',
-    ia32: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-x86/frankenstein.lib', libPath: 'win-x86/frankenstein.lib' },
-    x64: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-x64/frankenstein.lib', libPath: 'win-x64/frankenstein.lib' },
-    arm64: { libUrl: 'https://frankensteinjs.org/dist/v4.1.23/win-arm64/frankenstein.lib', libPath: 'win-arm64/frankenstein.lib' }
-  })
-})
-
-test('test process release - process.release ~ frankenstein@4.1.23 --dist-url=http://foo.bar/baz/', function (t) {
-  t.plan(2)
-
-  var release = processRelease([], { opts: { 'dist-url': 'http://foo.bar/baz/' } }, 'v4.1.23', {
-    name: 'frankenstein',
-    headersUrl: 'https://frankensteinjs.org/dist/v4.1.23/frankenstein-v4.1.23.tar.gz'
-  })
-
-  t.equal(release.semver.version, '4.1.23')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '4.1.23',
-    name: 'frankenstein',
-    baseUrl: 'http://foo.bar/baz/v4.1.23/',
-    tarballUrl: 'http://foo.bar/baz/v4.1.23/frankenstein-v4.1.23-headers.tar.gz',
-    shasumsUrl: 'http://foo.bar/baz/v4.1.23/SHASUMS256.txt',
-    versionDir: 'frankenstein-4.1.23',
-    ia32: { libUrl: 'http://foo.bar/baz/v4.1.23/win-x86/frankenstein.lib', libPath: 'win-x86/frankenstein.lib' },
-    x64: { libUrl: 'http://foo.bar/baz/v4.1.23/win-x64/frankenstein.lib', libPath: 'win-x64/frankenstein.lib' },
-    arm64: { libUrl: 'http://foo.bar/baz/v4.1.23/win-arm64/frankenstein.lib', libPath: 'win-arm64/frankenstein.lib' }
-  })
-})
-
-test('test process release - process.release ~ node@4.0.0-rc.4', function (t) {
-  t.plan(2)
-
-  var release = processRelease([], { opts: {} }, 'v4.0.0-rc.4', {
-    name: 'node',
-    headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz'
-  })
-
-  t.equal(release.semver.version, '4.0.0-rc.4')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '4.0.0-rc.4',
-    name: 'node',
-    baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/',
-    tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz',
-    shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt',
-    versionDir: '4.0.0-rc.4',
-    ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' },
-    x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' },
-    arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' }
-  })
-})
-
-test('test process release - process.release ~ node@4.0.0-rc.4 passed as argv[0]', function (t) {
-  t.plan(2)
 
+  it('test process release - process.release ~ node@4.0.0-rc.4 passed as argv[0]', function () {
   // note the missing 'v' on the arg, it should normalise when checking
-  // whether we're on the default or not
-  var release = processRelease(['4.0.0-rc.4'], { opts: {} }, 'v4.0.0-rc.4', {
-    name: 'node',
-    headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz'
-  })
-
-  t.equal(release.semver.version, '4.0.0-rc.4')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '4.0.0-rc.4',
-    name: 'node',
-    baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/',
-    tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz',
-    shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt',
-    versionDir: '4.0.0-rc.4',
-    ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' },
-    x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' },
-    arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' }
+    // whether we're on the default or not
+    var release = processRelease(['4.0.0-rc.4'], { opts: {} }, 'v4.0.0-rc.4', {
+      name: 'node',
+      headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz'
+    })
+
+    assert.strictEqual(release.semver.version, '4.0.0-rc.4')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '4.0.0-rc.4',
+      name: 'node',
+      baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/',
+      tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz',
+      shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt',
+      versionDir: '4.0.0-rc.4',
+      ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' },
+      x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' },
+      arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' }
+    })
  })
-})
-
-test('test process release - process.release ~ node@4.0.0-rc.4 - bogus string passed as argv[0]', function (t) {
-  t.plan(2)
 
+  it('test process release - process.release ~ node@4.0.0-rc.4 - bogus string passed as argv[0]', function () {
   // additional arguments can be passed in on the commandline that should be ignored if they
-  // are not specifying a valid version @ position 0
-  var release = processRelease(['this is no version!'], { opts: {} }, 'v4.0.0-rc.4', {
-    name: 'node',
-    headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz'
+    // are not specifying a valid version @ position 0
+    var release = processRelease(['this is no version!'], { opts: {} }, 'v4.0.0-rc.4', {
+      name: 'node',
+      headersUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz'
+    })
+
+    assert.strictEqual(release.semver.version, '4.0.0-rc.4')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '4.0.0-rc.4',
+      name: 'node',
+      baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/',
+      tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz',
+      shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt',
+      versionDir: '4.0.0-rc.4',
+      ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' },
+      x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' },
+      arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' }
+    })
  })
 
-  t.equal(release.semver.version, '4.0.0-rc.4')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '4.0.0-rc.4',
-    name: 'node',
-    baseUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/',
-    tarballUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/node-v4.0.0-rc.4-headers.tar.gz',
-    shasumsUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/SHASUMS256.txt',
-    versionDir: '4.0.0-rc.4',
-    ia32: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x86/node.lib', libPath: 'win-x86/node.lib' },
-    x64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-x64/node.lib', libPath: 'win-x64/node.lib' },
-    arm64: { libUrl: 'https://nodejs.org/download/rc/v4.0.0-rc.4/win-arm64/node.lib', libPath: 'win-arm64/node.lib' }
+  it('test process release - NODEJS_ORG_MIRROR', function () {
+    process.env.NODEJS_ORG_MIRROR = 'http://foo.bar'
+
+    var release = processRelease([], { opts: {} }, 'v4.1.23', {
+      name: 'node',
+      headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz'
+    })
+
+    assert.strictEqual(release.semver.version, '4.1.23')
+    delete release.semver
+
+    assert.deepStrictEqual(release, {
+      version: '4.1.23',
+      name: 'node',
+      baseUrl: 'http://foo.bar/v4.1.23/',
+      tarballUrl: 'http://foo.bar/v4.1.23/node-v4.1.23-headers.tar.gz',
+      shasumsUrl: 'http://foo.bar/v4.1.23/SHASUMS256.txt',
+      versionDir: '4.1.23',
+      ia32: { libUrl: 'http://foo.bar/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' },
+      x64: { libUrl: 'http://foo.bar/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' },
+      arm64: { libUrl: 'http://foo.bar/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' }
+    })
+
+    delete process.env.NODEJS_ORG_MIRROR
  })
 })
-
-test('test process release - NODEJS_ORG_MIRROR', function (t) {
-  t.plan(2)
-
-  process.env.NODEJS_ORG_MIRROR = 'http://foo.bar'
-
-  var release = processRelease([], { opts: {} }, 'v4.1.23', {
-    name: 'node',
-    headersUrl: 'https://nodejs.org/dist/v4.1.23/node-v4.1.23-headers.tar.gz'
-  })
-
-  t.equal(release.semver.version, '4.1.23')
-  delete release.semver
-
-  t.deepEqual(release, {
-    version: '4.1.23',
- name: 'node', - baseUrl: 'http://foo.bar/v4.1.23/', - tarballUrl: 'http://foo.bar/v4.1.23/node-v4.1.23-headers.tar.gz', - shasumsUrl: 'http://foo.bar/v4.1.23/SHASUMS256.txt', - versionDir: '4.1.23', - ia32: { libUrl: 'http://foo.bar/v4.1.23/win-x86/node.lib', libPath: 'win-x86/node.lib' }, - x64: { libUrl: 'http://foo.bar/v4.1.23/win-x64/node.lib', libPath: 'win-x64/node.lib' }, - arm64: { libUrl: 'http://foo.bar/v4.1.23/win-arm64/node.lib', libPath: 'win-arm64/node.lib' } - }) - - delete process.env.NODEJS_ORG_MIRROR -}) diff --git a/node_modules/npm/node_modules/node-gyp/update-gyp.py b/node_modules/npm/node_modules/node-gyp/update-gyp.py index 19524bd..70e2d10 100755 --- a/node_modules/npm/node_modules/node-gyp/update-gyp.py +++ b/node_modules/npm/node_modules/node-gyp/update-gyp.py @@ -49,7 +49,7 @@ def safe_extract(tar, path=".", members=None, *, numeric_owner=False): if not is_within_directory(path, member_path): raise Exception("Attempted Path Traversal in Tar File") - tar.extractall(path, members, numeric_owner) + tar.extractall(path, members, numeric_owner=numeric_owner) safe_extract(tar_ref, unzip_target) diff --git a/node_modules/npm/node_modules/nopt/bin/nopt.js b/node_modules/npm/node_modules/nopt/bin/nopt.js index bb04291..6ed2082 100755 --- a/node_modules/npm/node_modules/nopt/bin/nopt.js +++ b/node_modules/npm/node_modules/nopt/bin/nopt.js @@ -1,7 +1,8 @@ #!/usr/bin/env node -var nopt = require('../lib/nopt') -var path = require('path') -var types = { num: Number, +const nopt = require('../lib/nopt') +const path = require('path') +console.log('parsed', nopt({ + num: Number, bool: Boolean, help: Boolean, list: Array, @@ -13,8 +14,8 @@ var types = { num: Number, config: Boolean, length: Number, file: path, -} -var shorthands = { s: ['--str', 'astring'], +}, { + s: ['--str', 'astring'], b: ['--bool'], nb: ['--no-bool'], tft: ['--bool-list', '--no-bool-list', '--bool-list', 'true'], @@ -25,32 +26,4 @@ var shorthands = { s: ['--str', 'astring'], c: ['--config'], l: ['--length'], f: ['--file'], -} -var parsed = nopt(types - , shorthands - , process.argv - , 2) - -console.log('parsed', parsed) - -if (parsed.help) { - console.log('') - console.log('nopt cli tester') - console.log('') - console.log('types') - console.log(Object.keys(types).map(function M (t) { - var type = types[t] - if (Array.isArray(type)) { - return [t, type.map(function (mappedType) { - return mappedType.name - })] - } - return [t, type && type.name] - }).reduce(function (s, i) { - s[i[0]] = i[1] - return s - }, {})) - console.log('') - console.log('shorthands') - console.log(shorthands) -} +}, process.argv, 2)) diff --git a/node_modules/npm/node_modules/nopt/lib/debug.js b/node_modules/npm/node_modules/nopt/lib/debug.js index 194d0c6..e62198e 100644 --- a/node_modules/npm/node_modules/nopt/lib/debug.js +++ b/node_modules/npm/node_modules/nopt/lib/debug.js @@ -1,6 +1,4 @@ /* istanbul ignore next */ module.exports = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG - ? function () { - console.error.apply(console, arguments) - } - : function () {} + ? 
(...a) => console.error(...a) + : () => {} diff --git a/node_modules/npm/node_modules/nopt/lib/nopt-lib.js b/node_modules/npm/node_modules/nopt/lib/nopt-lib.js index 89d269f..d3d1de0 100644 --- a/node_modules/npm/node_modules/nopt/lib/nopt-lib.js +++ b/node_modules/npm/node_modules/nopt/lib/nopt-lib.js @@ -1,21 +1,47 @@ -var abbrev = require('abbrev') +const abbrev = require('abbrev') const debug = require('./debug') const defaultTypeDefs = require('./type-defs') -function nopt (args, { types, shorthands, typeDefs, invalidHandler }) { +const hasOwn = (o, k) => Object.prototype.hasOwnProperty.call(o, k) + +const getType = (k, { types, dynamicTypes }) => { + let hasType = hasOwn(types, k) + let type = types[k] + if (!hasType && typeof dynamicTypes === 'function') { + const matchedType = dynamicTypes(k) + if (matchedType !== undefined) { + type = matchedType + hasType = true + } + } + return [hasType, type] +} + +const isTypeDef = (type, def) => def && type === def +const hasTypeDef = (type, def) => def && type.indexOf(def) !== -1 +const doesNotHaveTypeDef = (type, def) => def && !hasTypeDef(type, def) + +function nopt (args, { + types, + shorthands, + typeDefs, + invalidHandler, + typeDefault, + dynamicTypes, +} = {}) { debug(types, shorthands, args, typeDefs) - var data = {} - var argv = { + const data = {} + const argv = { remain: [], cooked: args, original: args.slice(0), } - parse(args, data, argv.remain, { typeDefs, types, shorthands }) + parse(args, data, argv.remain, { typeDefs, types, dynamicTypes, shorthands }) // now data is full - clean(data, { types, typeDefs, invalidHandler }) + clean(data, { types, dynamicTypes, typeDefs, invalidHandler, typeDefault }) data.argv = argv Object.defineProperty(data.argv, 'toString', { @@ -28,30 +54,48 @@ function nopt (args, { types, shorthands, typeDefs, invalidHandler }) { return data } -function clean (data, { types, typeDefs, invalidHandler }) { - const StringType = typeDefs.String.type - const NumberType = typeDefs.Number.type - const ArrayType = typeDefs.Array.type - const BooleanType = typeDefs.Boolean.type - const DateType = typeDefs.Date.type +function clean (data, { + types = {}, + typeDefs = {}, + dynamicTypes, + invalidHandler, + typeDefault, +} = {}) { + const StringType = typeDefs.String?.type + const NumberType = typeDefs.Number?.type + const ArrayType = typeDefs.Array?.type + const BooleanType = typeDefs.Boolean?.type + const DateType = typeDefs.Date?.type + + const hasTypeDefault = typeof typeDefault !== 'undefined' + if (!hasTypeDefault) { + typeDefault = [false, true, null] + if (StringType) { + typeDefault.push(StringType) + } + if (ArrayType) { + typeDefault.push(ArrayType) + } + } - var remove = {} - var typeDefault = [false, true, null, StringType, ArrayType] + const remove = {} - Object.keys(data).forEach(function (k) { + Object.keys(data).forEach((k) => { if (k === 'argv') { return } - var val = data[k] - var isArray = Array.isArray(val) - var type = types[k] + let val = data[k] + debug('val=%j', val) + const isArray = Array.isArray(val) + let [hasType, rawType] = getType(k, { types, dynamicTypes }) + let type = rawType if (!isArray) { val = [val] } if (!type) { type = typeDefault } - if (type === ArrayType) { + if (isTypeDef(type, ArrayType)) { type = typeDefault.concat(ArrayType) } if (!Array.isArray(type)) { @@ -60,57 +104,62 @@ function clean (data, { types, typeDefs, invalidHandler }) { debug('val=%j', val) debug('types=', type) - val = val.map(function (v) { + val = val.map((v) => { // if it's an unknown value, then 
parse false/true/null/numbers/dates if (typeof v === 'string') { debug('string %j', v) v = v.trim() if ((v === 'null' && ~type.indexOf(null)) || (v === 'true' && - (~type.indexOf(true) || ~type.indexOf(BooleanType))) + (~type.indexOf(true) || hasTypeDef(type, BooleanType))) || (v === 'false' && - (~type.indexOf(false) || ~type.indexOf(BooleanType)))) { + (~type.indexOf(false) || hasTypeDef(type, BooleanType)))) { v = JSON.parse(v) debug('jsonable %j', v) - } else if (~type.indexOf(NumberType) && !isNaN(v)) { + } else if (hasTypeDef(type, NumberType) && !isNaN(v)) { debug('convert to number', v) v = +v - } else if (~type.indexOf(DateType) && !isNaN(Date.parse(v))) { + } else if (hasTypeDef(type, DateType) && !isNaN(Date.parse(v))) { debug('convert to date', v) v = new Date(v) } } - if (!Object.prototype.hasOwnProperty.call(types, k)) { - return v + if (!hasType) { + if (!hasTypeDefault) { + return v + } + // if the default type has been passed in then we want to validate the + // unknown data key instead of bailing out earlier. we also set the raw + // type which is passed to the invalid handler so that it can be + // determined if during validation if it is unknown vs invalid + rawType = typeDefault } // allow `--no-blah` to set 'blah' to null if null is allowed if (v === false && ~type.indexOf(null) && - !(~type.indexOf(false) || ~type.indexOf(BooleanType))) { + !(~type.indexOf(false) || hasTypeDef(type, BooleanType))) { v = null } - var d = {} + const d = {} d[k] = v - debug('prevalidated val', d, v, types[k]) - if (!validate(d, k, v, types[k], { typeDefs })) { + debug('prevalidated val', d, v, rawType) + if (!validate(d, k, v, rawType, { typeDefs })) { if (invalidHandler) { - invalidHandler(k, v, types[k], data) + invalidHandler(k, v, rawType, data) } else if (invalidHandler !== false) { - debug('invalid: ' + k + '=' + v, types[k]) + debug('invalid: ' + k + '=' + v, rawType) } return remove } - debug('validated v', d, v, types[k]) + debug('validated v', d, v, rawType) return d[k] - }).filter(function (v) { - return v !== remove - }) + }).filter((v) => v !== remove) // if we allow Array specifically, then an empty array is how we // express 'no value here', not null. Allow it. - if (!val.length && type.indexOf(ArrayType) === -1) { + if (!val.length && doesNotHaveTypeDef(type, ArrayType)) { debug('VAL HAS NO LENGTH, DELETE IT', val, k, type.indexOf(ArrayType)) delete data[k] } else if (isArray) { @@ -124,12 +173,12 @@ function clean (data, { types, typeDefs, invalidHandler }) { }) } -function validate (data, k, val, type, { typeDefs }) { - const ArrayType = typeDefs.Array.type +function validate (data, k, val, type, { typeDefs } = {}) { + const ArrayType = typeDefs?.Array?.type // arrays are lists of types. if (Array.isArray(type)) { for (let i = 0, l = type.length; i < l; i++) { - if (type[i] === ArrayType) { + if (isTypeDef(type[i], ArrayType)) { continue } if (validate(data, k, val, type[i], { typeDefs })) { @@ -141,7 +190,7 @@ function validate (data, k, val, type, { typeDefs }) { } // an array of anything? - if (type === ArrayType) { + if (isTypeDef(type, ArrayType)) { return true } @@ -166,17 +215,17 @@ function validate (data, k, val, type, { typeDefs }) { } // now go through the list of typeDefs, validate against each one. 
- var ok = false - var types = Object.keys(typeDefs) + let ok = false + const types = Object.keys(typeDefs) for (let i = 0, l = types.length; i < l; i++) { debug('test type %j %j %j', k, val, types[i]) - var t = typeDefs[types[i]] + const t = typeDefs[types[i]] if (t && ( (type && type.name && t.type && t.type.name) ? (type.name === t.type.name) : (type === t.type) )) { - var d = {} + const d = {} ok = t.validate(d, k, val) !== false val = d[k] if (ok) { @@ -193,19 +242,25 @@ function validate (data, k, val, type, { typeDefs }) { return ok } -function parse (args, data, remain, { typeDefs, types, shorthands }) { - const StringType = typeDefs.String.type - const NumberType = typeDefs.String.type - const ArrayType = typeDefs.Array.type - const BooleanType = typeDefs.Boolean.type +function parse (args, data, remain, { + types = {}, + typeDefs = {}, + shorthands = {}, + dynamicTypes, +} = {}) { + const StringType = typeDefs.String?.type + const NumberType = typeDefs.Number?.type + const ArrayType = typeDefs.Array?.type + const BooleanType = typeDefs.Boolean?.type debug('parse', args, data, remain) - var abbrevs = abbrev(Object.keys(types)) - var shortAbbr = abbrev(Object.keys(shorthands)) + const abbrevs = abbrev(Object.keys(types)) + debug('abbrevs=%j', abbrevs) + const shortAbbr = abbrev(Object.keys(shorthands)) - for (var i = 0; i < args.length; i++) { - var arg = args[i] + for (let i = 0; i < args.length; i++) { + let arg = args[i] debug('arg', arg) if (arg.match(/^-{2,}$/)) { @@ -215,22 +270,21 @@ function parse (args, data, remain, { typeDefs, types, shorthands }) { args[i] = '--' break } - var hadEq = false + let hadEq = false if (arg.charAt(0) === '-' && arg.length > 1) { - var at = arg.indexOf('=') + const at = arg.indexOf('=') if (at > -1) { hadEq = true - var v = arg.slice(at + 1) + const v = arg.slice(at + 1) arg = arg.slice(0, at) args.splice(i, 1, arg, v) } // see if it's a shorthand // if so, splice and back up to re-parse it. - var shRes = resolveShort(arg, shortAbbr, abbrevs, { shorthands }) + const shRes = resolveShort(arg, shortAbbr, abbrevs, { shorthands }) debug('arg=%j shRes=%j', arg, shRes) if (shRes) { - debug(arg, shRes) args.splice.apply(args, [i, 1].concat(shRes)) if (arg !== shRes[0]) { i-- @@ -238,7 +292,7 @@ function parse (args, data, remain, { typeDefs, types, shorthands }) { } } arg = arg.replace(/^-+/, '') - var no = null + let no = null while (arg.toLowerCase().indexOf('no-') === 0) { no = !no arg = arg.slice(3) @@ -248,33 +302,30 @@ function parse (args, data, remain, { typeDefs, types, shorthands }) { arg = abbrevs[arg] } - var argType = types[arg] - var isTypeArray = Array.isArray(argType) + let [hasType, argType] = getType(arg, { types, dynamicTypes }) + let isTypeArray = Array.isArray(argType) if (isTypeArray && argType.length === 1) { isTypeArray = false argType = argType[0] } - var isArray = argType === ArrayType || - isTypeArray && argType.indexOf(ArrayType) !== -1 + let isArray = isTypeDef(argType, ArrayType) || + isTypeArray && hasTypeDef(argType, ArrayType) // allow unknown things to be arrays if specified multiple times. 
- if ( - !Object.prototype.hasOwnProperty.call(types, arg) && - Object.prototype.hasOwnProperty.call(data, arg) - ) { + if (!hasType && hasOwn(data, arg)) { if (!Array.isArray(data[arg])) { data[arg] = [data[arg]] } isArray = true } - var val - var la = args[i + 1] + let val + let la = args[i + 1] - var isBool = typeof no === 'boolean' || - argType === BooleanType || - isTypeArray && argType.indexOf(BooleanType) !== -1 || + const isBool = typeof no === 'boolean' || + isTypeDef(argType, BooleanType) || + isTypeArray && hasTypeDef(argType, BooleanType) || (typeof argType === 'undefined' && !hadEq) || (la === 'false' && (argType === null || @@ -305,11 +356,11 @@ function parse (args, data, remain, { typeDefs, types, shorthands }) { i++ } else if (!la.match(/^-{2,}[^-]/) && !isNaN(la) && - ~argType.indexOf(NumberType)) { + hasTypeDef(argType, NumberType)) { // number val = +la i++ - } else if (!la.match(/^-[^-]/) && ~argType.indexOf(StringType)) { + } else if (!la.match(/^-[^-]/) && hasTypeDef(argType, StringType)) { // string val = la i++ @@ -325,7 +376,7 @@ function parse (args, data, remain, { typeDefs, types, shorthands }) { continue } - if (argType === StringType) { + if (isTypeDef(argType, StringType)) { if (la === undefined) { la = '' } else if (la.match(/^-{1,2}[^-]+/)) { @@ -353,7 +404,26 @@ function parse (args, data, remain, { typeDefs, types, shorthands }) { } } -function resolveShort (arg, shortAbbr, abbrevs, { shorthands }) { +const SINGLES = Symbol('singles') +const singleCharacters = (arg, shorthands) => { + let singles = shorthands[SINGLES] + if (!singles) { + singles = Object.keys(shorthands).filter((s) => s.length === 1).reduce((l, r) => { + l[r] = true + return l + }, {}) + shorthands[SINGLES] = singles + debug('shorthand singles', singles) + } + const chrs = arg.split('').filter((c) => singles[c]) + return chrs.join('') === arg ? chrs : null +} + +function resolveShort (arg, ...rest) { + const { types = {}, shorthands = {} } = rest.length ? rest.pop() : {} + const shortAbbr = rest[0] ?? abbrev(Object.keys(shorthands)) + const abbrevs = rest[1] ?? 
abbrev(Object.keys(types)) + // handle single-char shorthands glommed together, like // npm ls -glp, but only if there is one dash, and only if // all of the chars are single-char shorthands, and it's @@ -376,28 +446,9 @@ function resolveShort (arg, shortAbbr, abbrevs, { shorthands }) { } // first check to see if this arg is a set of single-char shorthands - var singles = shorthands.___singles - if (!singles) { - singles = Object.keys(shorthands).filter(function (s) { - return s.length === 1 - }).reduce(function (l, r) { - l[r] = true - return l - }, {}) - shorthands.___singles = singles - debug('shorthand singles', singles) - } - - var chrs = arg.split('').filter(function (c) { - return singles[c] - }) - - if (chrs.join('') === arg) { - return chrs.map(function (c) { - return shorthands[c] - }).reduce(function (l, r) { - return l.concat(r) - }, []) + const chrs = singleCharacters(arg, shorthands) + if (chrs) { + return chrs.map((c) => shorthands[c]).reduce((l, r) => l.concat(r), []) } // if it's an arg abbrev, and not a literal shorthand, then prefer the arg diff --git a/node_modules/npm/node_modules/nopt/lib/nopt.js b/node_modules/npm/node_modules/nopt/lib/nopt.js index 70fd809..37f01a0 100644 --- a/node_modules/npm/node_modules/nopt/lib/nopt.js +++ b/node_modules/npm/node_modules/nopt/lib/nopt.js @@ -12,9 +12,9 @@ exports.clean = clean exports.typeDefs = defaultTypeDefs exports.lib = lib -function nopt (types = {}, shorthands = {}, args = process.argv, slice = 2) { +function nopt (types, shorthands, args = process.argv, slice = 2) { return lib.nopt(args.slice(slice), { - types, + types: types || {}, shorthands: shorthands || {}, typeDefs: exports.typeDefs, invalidHandler: exports.invalidHandler, @@ -23,7 +23,7 @@ function nopt (types = {}, shorthands = {}, args = process.argv, slice = 2) { function clean (data, types, typeDefs = exports.typeDefs) { return lib.clean(data, { - types, + types: types || {}, typeDefs, invalidHandler: exports.invalidHandler, }) diff --git a/node_modules/npm/node_modules/nopt/lib/type-defs.js b/node_modules/npm/node_modules/nopt/lib/type-defs.js index 6acf5e0..608352e 100644 --- a/node_modules/npm/node_modules/nopt/lib/type-defs.js +++ b/node_modules/npm/node_modules/nopt/lib/type-defs.js @@ -1,7 +1,7 @@ -var url = require('url') -var path = require('path') -var Stream = require('stream').Stream -var os = require('os') +const url = require('url') +const path = require('path') +const Stream = require('stream').Stream +const os = require('os') const debug = require('./debug') function validateString (data, k, val) { @@ -18,9 +18,9 @@ function validatePath (data, k, val) { val = String(val) - var isWin = process.platform === 'win32' - var homePattern = isWin ? /^~(\/|\\)/ : /^~\// - var home = os.homedir() + const isWin = process.platform === 'win32' + const homePattern = isWin ? 
/^~(\/|\\)/ : /^~\// + const home = os.homedir() if (home && val.match(homePattern)) { data[k] = path.resolve(home, val.slice(2)) @@ -39,7 +39,7 @@ function validateNumber (data, k, val) { } function validateDate (data, k, val) { - var s = Date.parse(val) + const s = Date.parse(val) debug('validate Date %j %j %j', k, val, s) if (isNaN(s)) { return false diff --git a/node_modules/npm/node_modules/nopt/package.json b/node_modules/npm/node_modules/nopt/package.json index a61cae4..01b7de8 100644 --- a/node_modules/npm/node_modules/nopt/package.json +++ b/node_modules/npm/node_modules/nopt/package.json @@ -1,6 +1,6 @@ { "name": "nopt", - "version": "7.1.0", + "version": "7.2.0", "description": "Option parsing for Node, supporting types, shorthands, etc. Used by npm.", "author": "GitHub Inc.", "main": "lib/nopt.js", @@ -26,13 +26,10 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.12.0", + "@npmcli/template-oss": "4.15.1", "tap": "^16.3.0" }, "tap": { - "lines": 91, - "branches": 87, - "statements": 91, "nyc-arg": [ "--exclude", "tap-snapshots/**" @@ -48,6 +45,7 @@ "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "windowsCI": false, - "version": "4.12.0" + "version": "4.15.1", + "publish": true } } diff --git a/node_modules/npm/node_modules/pacote/README.md b/node_modules/npm/node_modules/pacote/README.md index 3db6f96..17c027d 100644 --- a/node_modules/npm/node_modules/pacote/README.md +++ b/node_modules/npm/node_modules/pacote/README.md @@ -175,6 +175,9 @@ resolved, and other properties, as they are determined. * `verifyAttestations` A boolean that will make pacote verify Sigstore attestations, if present. There must be a configured `_keys` entry in the config that is scoped to the registry the manifest is being fetched from. +* `tufCache` Where to store metadata/target files when retrieving the package + attestation key material via TUF. Defaults to the same cache directory that + npm will use by default, based on platform and environment. ### Advanced API diff --git a/node_modules/npm/node_modules/pacote/lib/fetcher.js b/node_modules/npm/node_modules/pacote/lib/fetcher.js index 6694a57..f961a45 100644 --- a/node_modules/npm/node_modules/pacote/lib/fetcher.js +++ b/node_modules/npm/node_modules/pacote/lib/fetcher.js @@ -61,7 +61,8 @@ class FetcherBase { // by adding/modifying the integrity value. this.opts = { ...opts } - this.cache = opts.cache || cacheDir() + this.cache = opts.cache || cacheDir().cacache + this.tufCache = opts.tufCache || cacheDir().tufcache this.resolved = opts.resolved || null // default to caching/verifying with sha512, that's what we usually have diff --git a/node_modules/npm/node_modules/pacote/lib/registry.js b/node_modules/npm/node_modules/pacote/lib/registry.js index 625bedc..34d9b2b 100644 --- a/node_modules/npm/node_modules/pacote/lib/registry.js +++ b/node_modules/npm/node_modules/pacote/lib/registry.js @@ -295,7 +295,10 @@ class RegistryFetcher extends Fetcher { // // Publish attestations are signed with a keyid so we need to // specify a public key from the keys endpoint: `registry-host.tld/-/npm/v1/keys` - const options = { keySelector: publicKey ? () => publicKey.pemkey : undefined } + const options = { + tufCachePath: this.tufCache, + keySelector: publicKey ? 
() => publicKey.pemkey : undefined, + } await sigstore.verify(bundle, null, options) } catch (e) { throw Object.assign(new Error( diff --git a/node_modules/npm/node_modules/pacote/lib/util/cache-dir.js b/node_modules/npm/node_modules/pacote/lib/util/cache-dir.js index 4236213..ac83b17 100644 --- a/node_modules/npm/node_modules/pacote/lib/util/cache-dir.js +++ b/node_modules/npm/node_modules/pacote/lib/util/cache-dir.js @@ -8,5 +8,8 @@ module.exports = (fakePlatform = false) => { const platform = fakePlatform || process.platform const cacheExtra = platform === 'win32' ? 'npm-cache' : '.npm' const cacheRoot = (platform === 'win32' && process.env.LOCALAPPDATA) || home - return resolve(cacheRoot, cacheExtra, '_cacache') + return { + cacache: resolve(cacheRoot, cacheExtra, '_cacache'), + tufcache: resolve(cacheRoot, cacheExtra, '_tuf'), + } } diff --git a/node_modules/npm/node_modules/pacote/package.json b/node_modules/npm/node_modules/pacote/package.json index 48f2bb0..bc8d984 100644 --- a/node_modules/npm/node_modules/pacote/package.json +++ b/node_modules/npm/node_modules/pacote/package.json @@ -1,6 +1,6 @@ { "name": "pacote", - "version": "15.1.3", + "version": "15.2.0", "description": "JavaScript package downloader", "author": "GitHub Inc.", "bin": { diff --git a/node_modules/npm/node_modules/path-scurry/package.json b/node_modules/npm/node_modules/path-scurry/package.json index 677bf1c..5b90082 100644 --- a/node_modules/npm/node_modules/path-scurry/package.json +++ b/node_modules/npm/node_modules/path-scurry/package.json @@ -1,6 +1,6 @@ { "name": "path-scurry", - "version": "1.9.1", + "version": "1.9.2", "description": "walk paths fast and efficiently", "author": "Isaac Z. Schlueter (https://blog.izs.me)", "main": "./dist/cjs/index.js", @@ -82,6 +82,6 @@ }, "dependencies": { "lru-cache": "^9.1.1", - "minipass": "^5.0.0 || ^6.0.0" + "minipass": "^5.0.0 || ^6.0.2" } } diff --git a/node_modules/npm/node_modules/read-package-json/lib/read-json.js b/node_modules/npm/node_modules/read-package-json/lib/read-json.js index aaf24e9..d35f09e 100644 --- a/node_modules/npm/node_modules/read-package-json/lib/read-json.js +++ b/node_modules/npm/node_modules/read-package-json/lib/read-json.js @@ -352,7 +352,7 @@ function bins (file, data, cb) { return cb(null, data) } - m = path.resolve(path.dirname(file), m) + m = path.resolve(path.dirname(file), path.join('.', path.join('/', m))) glob('**', { cwd: m }) .then(binsGlob => bins_(file, data, binsGlob, cb)) .catch(er => cb(er)) diff --git a/node_modules/npm/node_modules/read-package-json/package.json b/node_modules/npm/node_modules/read-package-json/package.json index a2f9308..90ab321 100644 --- a/node_modules/npm/node_modules/read-package-json/package.json +++ b/node_modules/npm/node_modules/read-package-json/package.json @@ -1,6 +1,6 @@ { "name": "read-package-json", - "version": "6.0.3", + "version": "6.0.4", "author": "GitHub Inc.", "description": "The thing npm uses to read package.json files with semantics and defaults and validation", "repository": { @@ -30,7 +30,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/template-oss": "4.15.1", "tap": "^16.0.1" }, "license": "ISC", @@ -42,10 +42,10 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" }, "tap": { - "branches": 68, - "functions": 74, - "lines": 74, - "statements": 74, + "branches": 73, + "functions": 77, + "lines": 77, + "statements": 77, "nyc-arg": [ "--exclude", "tap-snapshots/**" @@ -53,7 +53,7 @@ }, "templateOSS": { 
"//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.14.1", + "version": "4.15.1", "publish": "true" } } diff --git a/node_modules/npm/node_modules/safe-buffer/index.js b/node_modules/npm/node_modules/safe-buffer/index.js index 22438da..f8d3ec9 100644 --- a/node_modules/npm/node_modules/safe-buffer/index.js +++ b/node_modules/npm/node_modules/safe-buffer/index.js @@ -1,3 +1,4 @@ +/*! safe-buffer. MIT License. Feross Aboukhadijeh */ /* eslint-disable node/no-deprecated-api */ var buffer = require('buffer') var Buffer = buffer.Buffer @@ -20,6 +21,8 @@ function SafeBuffer (arg, encodingOrOffset, length) { return Buffer(arg, encodingOrOffset, length) } +SafeBuffer.prototype = Object.create(Buffer.prototype) + // Copy static methods from Buffer copyProps(Buffer, SafeBuffer) diff --git a/node_modules/npm/node_modules/safe-buffer/package.json b/node_modules/npm/node_modules/safe-buffer/package.json index 623fbc3..f2869e2 100644 --- a/node_modules/npm/node_modules/safe-buffer/package.json +++ b/node_modules/npm/node_modules/safe-buffer/package.json @@ -1,18 +1,18 @@ { "name": "safe-buffer", "description": "Safer Node.js Buffer API", - "version": "5.1.2", + "version": "5.2.1", "author": { "name": "Feross Aboukhadijeh", "email": "feross@feross.org", - "url": "http://feross.org" + "url": "https://feross.org" }, "bugs": { "url": "https://github.com/feross/safe-buffer/issues" }, "devDependencies": { "standard": "*", - "tape": "^4.0.0" + "tape": "^5.0.0" }, "homepage": "https://github.com/feross/safe-buffer", "keywords": [ @@ -33,5 +33,19 @@ }, "scripts": { "test": "standard && tape test/*.js" - } + }, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] } diff --git a/node_modules/npm/node_modules/semver/classes/comparator.js b/node_modules/npm/node_modules/semver/classes/comparator.js index 2146c88..3d39c0e 100644 --- a/node_modules/npm/node_modules/semver/classes/comparator.js +++ b/node_modules/npm/node_modules/semver/classes/comparator.js @@ -16,6 +16,7 @@ class Comparator { } } + comp = comp.trim().split(/\s+/).join(' ') debug('comparator', comp, options) this.options = options this.loose = !!options.loose @@ -133,7 +134,7 @@ class Comparator { module.exports = Comparator const parseOptions = require('../internal/parse-options') -const { re, t } = require('../internal/re') +const { safeRe: re, t } = require('../internal/re') const cmp = require('../functions/cmp') const debug = require('../internal/debug') const SemVer = require('./semver') diff --git a/node_modules/npm/node_modules/semver/classes/range.js b/node_modules/npm/node_modules/semver/classes/range.js index d9e866d..53c2540 100644 --- a/node_modules/npm/node_modules/semver/classes/range.js +++ b/node_modules/npm/node_modules/semver/classes/range.js @@ -26,19 +26,26 @@ class Range { this.loose = !!options.loose this.includePrerelease = !!options.includePrerelease - // First, split based on boolean or || + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. 
this.raw = range - this.set = range + .trim() + .split(/\s+/) + .join(' ') + + // First, split on || + this.set = this.raw .split('||') // map the range to a 2d array of comparators - .map(r => this.parseRange(r.trim())) + .map(r => this.parseRange(r)) // throw out any comparator lists that are empty // this generally means that it was not a valid range, which is allowed // in loose mode, but will still throw if the WHOLE range is invalid. .filter(c => c.length) if (!this.set.length) { - throw new TypeError(`Invalid SemVer Range: ${range}`) + throw new TypeError(`Invalid SemVer Range: ${this.raw}`) } // if we have any that are not the null set, throw out null sets. @@ -64,9 +71,7 @@ class Range { format () { this.range = this.set - .map((comps) => { - return comps.join(' ').trim() - }) + .map((comps) => comps.join(' ').trim()) .join('||') .trim() return this.range @@ -77,8 +82,6 @@ class Range { } parseRange (range) { - range = range.trim() - // memoize range parsing for performance. // this is a very hot path, and fully deterministic. const memoOpts = @@ -105,9 +108,6 @@ class Range { // `^ 1.2.3` => `^1.2.3` range = range.replace(re[t.CARETTRIM], caretTrimReplace) - // normalize spaces - range = range.split(/\s+/).join(' ') - // At this point, the range is completely trimmed and // ready to be split into comparators. @@ -203,7 +203,7 @@ const Comparator = require('./comparator') const debug = require('../internal/debug') const SemVer = require('./semver') const { - re, + safeRe: re, t, comparatorTrimReplace, tildeTrimReplace, @@ -257,10 +257,13 @@ const isX = id => !id || id.toLowerCase() === 'x' || id === '*' // ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 // ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 // ~0.0.1 --> >=0.0.1 <0.1.0-0 -const replaceTildes = (comp, options) => - comp.trim().split(/\s+/).map((c) => { - return replaceTilde(c, options) - }).join(' ') +const replaceTildes = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceTilde(c, options)) + .join(' ') +} const replaceTilde = (comp, options) => { const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] @@ -298,10 +301,13 @@ const replaceTilde = (comp, options) => { // ^1.2.0 --> >=1.2.0 <2.0.0-0 // ^0.0.1 --> >=0.0.1 <0.0.2-0 // ^0.1.0 --> >=0.1.0 <0.2.0-0 -const replaceCarets = (comp, options) => - comp.trim().split(/\s+/).map((c) => { - return replaceCaret(c, options) - }).join(' ') +const replaceCarets = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceCaret(c, options)) + .join(' ') +} const replaceCaret = (comp, options) => { debug('caret', comp, options) @@ -358,9 +364,10 @@ const replaceCaret = (comp, options) => { const replaceXRanges = (comp, options) => { debug('replaceXRanges', comp, options) - return comp.split(/\s+/).map((c) => { - return replaceXRange(c, options) - }).join(' ') + return comp + .split(/\s+/) + .map((c) => replaceXRange(c, options)) + .join(' ') } const replaceXRange = (comp, options) => { @@ -443,12 +450,15 @@ const replaceXRange = (comp, options) => { const replaceStars = (comp, options) => { debug('replaceStars', comp, options) // Looseness is ignored here. star is always as loose as it gets! - return comp.trim().replace(re[t.STAR], '') + return comp + .trim() + .replace(re[t.STAR], '') } const replaceGTE0 = (comp, options) => { debug('replaceGTE0', comp, options) - return comp.trim() + return comp + .trim() .replace(re[options.includePrerelease ? 
t.GTE0PRE : t.GTE0], '') } @@ -486,7 +496,7 @@ const hyphenReplace = incPr => ($0, to = `<=${to}` } - return (`${from} ${to}`).trim() + return `${from} ${to}`.trim() } const testSet = (set, version, options) => { diff --git a/node_modules/npm/node_modules/semver/classes/semver.js b/node_modules/npm/node_modules/semver/classes/semver.js index 99dbe82..84e8459 100644 --- a/node_modules/npm/node_modules/semver/classes/semver.js +++ b/node_modules/npm/node_modules/semver/classes/semver.js @@ -1,6 +1,6 @@ const debug = require('../internal/debug') const { MAX_LENGTH, MAX_SAFE_INTEGER } = require('../internal/constants') -const { re, t } = require('../internal/re') +const { safeRe: re, t } = require('../internal/re') const parseOptions = require('../internal/parse-options') const { compareIdentifiers } = require('../internal/identifiers') @@ -291,8 +291,10 @@ class SemVer { default: throw new Error(`invalid increment argument: ${release}`) } - this.format() - this.raw = this.version + this.raw = this.format() + if (this.build.length) { + this.raw += `+${this.build.join('.')}` + } return this } } diff --git a/node_modules/npm/node_modules/semver/functions/coerce.js b/node_modules/npm/node_modules/semver/functions/coerce.js index 2e01452..febbff9 100644 --- a/node_modules/npm/node_modules/semver/functions/coerce.js +++ b/node_modules/npm/node_modules/semver/functions/coerce.js @@ -1,6 +1,6 @@ const SemVer = require('../classes/semver') const parse = require('./parse') -const { re, t } = require('../internal/re') +const { safeRe: re, t } = require('../internal/re') const coerce = (version, options) => { if (version instanceof SemVer) { diff --git a/node_modules/npm/node_modules/semver/functions/diff.js b/node_modules/npm/node_modules/semver/functions/diff.js index fafc11c..fc224e3 100644 --- a/node_modules/npm/node_modules/semver/functions/diff.js +++ b/node_modules/npm/node_modules/semver/functions/diff.js @@ -13,6 +13,35 @@ const diff = (version1, version2) => { const highVersion = v1Higher ? v1 : v2 const lowVersion = v1Higher ? v2 : v1 const highHasPre = !!highVersion.prerelease.length + const lowHasPre = !!lowVersion.prerelease.length + + if (lowHasPre && !highHasPre) { + // Going from prerelease -> no prerelease requires some special casing + + // If the low version has only a major, then it will always be a major + // Some examples: + // 1.0.0-1 -> 1.0.0 + // 1.0.0-1 -> 1.1.1 + // 1.0.0-1 -> 2.0.0 + if (!lowVersion.patch && !lowVersion.minor) { + return 'major' + } + + // Otherwise it can be determined by checking the high version + + if (highVersion.patch) { + // anything higher than a patch bump would result in the wrong version + return 'patch' + } + + if (highVersion.minor) { + // anything higher than a minor bump would result in the wrong version + return 'minor' + } + + // bumping major/minor/patch all have same result + return 'major' + } // add the `pre` prefix if we are going to a prerelease version const prefix = highHasPre ? 
'pre' : '' @@ -29,26 +58,8 @@ const diff = (version1, version2) => { return prefix + 'patch' } - // at this point we know stable versions match but overall versions are not equal, - // so either they are both prereleases, or the lower version is a prerelease - - if (highHasPre) { - // high and low are preleases - return 'prerelease' - } - - if (lowVersion.patch) { - // anything higher than a patch bump would result in the wrong version - return 'patch' - } - - if (lowVersion.minor) { - // anything higher than a minor bump would result in the wrong version - return 'minor' - } - - // bumping major/minor/patch all have same result - return 'major' + // high and low are preleases + return 'prerelease' } module.exports = diff diff --git a/node_modules/npm/node_modules/semver/internal/re.js b/node_modules/npm/node_modules/semver/internal/re.js index ed88398..f73ef1a 100644 --- a/node_modules/npm/node_modules/semver/internal/re.js +++ b/node_modules/npm/node_modules/semver/internal/re.js @@ -4,16 +4,27 @@ exports = module.exports = {} // The actual regexps go on exports.re const re = exports.re = [] +const safeRe = exports.safeRe = [] const src = exports.src = [] const t = exports.t = {} let R = 0 const createToken = (name, value, isGlobal) => { + // Replace all greedy whitespace to prevent regex dos issues. These regex are + // used internally via the safeRe object since all inputs in this library get + // normalized first to trim and collapse all extra whitespace. The original + // regexes are exported for userland consumption and lower level usage. A + // future breaking change could export the safer regex only with a note that + // all input should have extra whitespace removed. + const safe = value + .split('\\s*').join('\\s{0,1}') + .split('\\s+').join('\\s') const index = R++ debug(name, index, value) t[name] = index src[index] = value re[index] = new RegExp(value, isGlobal ? 'g' : undefined) + safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined) } // The following Regular Expressions can be used for tokenizing, diff --git a/node_modules/npm/node_modules/semver/package.json b/node_modules/npm/node_modules/semver/package.json index 592404a..7d0aff3 100644 --- a/node_modules/npm/node_modules/semver/package.json +++ b/node_modules/npm/node_modules/semver/package.json @@ -1,6 +1,6 @@ { "name": "semver", - "version": "7.5.1", + "version": "7.5.2", "description": "The semantic version parser used by npm.", "main": "index.js", "scripts": { @@ -14,7 +14,7 @@ }, "devDependencies": { "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/template-oss": "4.15.1", "tap": "^16.0.0" }, "license": "ISC", @@ -37,7 +37,7 @@ "range.bnf" ], "tap": { - "check-coverage": true, + "timeout": 30, "coverage-map": "map.js", "nyc-arg": [ "--exclude", @@ -53,7 +53,7 @@ "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.14.1", + "version": "4.15.1", "engines": ">=10", "ciVersions": [ "10.0.0", diff --git a/node_modules/npm/node_modules/sigstore/README.md b/node_modules/npm/node_modules/sigstore/README.md index fd9260f..2540fa8 100644 --- a/node_modules/npm/node_modules/sigstore/README.md +++ b/node_modules/npm/node_modules/sigstore/README.md @@ -42,7 +42,8 @@ necessary to verify the signature. * `rekorURL` ``: The base URL of the Rekor instance to use when adding the signature to the transparency log. Defaults to `'https://rekor.sigstore.dev'`. 
* `tsaServerURL` ``: The base URL of the Timestamp Authority instance to use when requesting a signed timestamp. If omitted, no timestamp will be requested. * `tlogUpload` ``: Flag indicating whether or not the signature should be recorded on the Rekor transparency log. Defaults to `true`. - * `identityToken` ``: The OIDC token identifying the signer. If no explicit token is supplied, an attempt will be made to retrieve one from the environment. + * `identityToken` ``: The OIDC token identifying the signer. If no explicit token is supplied, an attempt will be made to retrieve one from the environment. This config cannot be used with `identityProvider`. + * `identityProvider` ``: Object which implements `getToken: () => Promise`. The supplied provider will be used to retrieve an OIDC token. If no provider is supplied, an attempt will be made to retrieve an OIDC token from the environment. This config cannot be used with `identityToken`. ### attest(payload, payloadType[, options]) @@ -57,7 +58,8 @@ as well as the verification material necessary to verify the signature. * `rekorURL` ``: The base URL of the Rekor instance to use when adding the signature to the transparency log. Defaults to `'https://rekor.sigstore.dev'`. * `tsaServerURL` ``: The base URL of the Timestamp Authority instance to use when requesting a signed timestamp. If omitted, no timestamp will be requested. * `tlogUpload` ``: Flag indicating whether or not the signed statement should be recorded on the Rekor transparency log. Defaults to `true`. - * `identityToken` ``: The OIDC token identifying the signer. If no explicit token is supplied, an attempt will be made to retrieve one from the environment. + * `identityToken` ``: The OIDC token identifying the signer. If no explicit token is supplied, an attempt will be made to retrieve one from the environment. This config cannot be used with `identityProvider`. + * `identityProvider` ``: Object which implements `getToken: () => Promise`. The supplied provider will be used to retrieve an OIDC token. If no provider is supplied, an attempt will be made to retrieve an OIDC token from the environment. This config cannot be used with `identityToken`. 
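A minimal sketch of the new `identityProvider` option documented above, assuming an OIDC token is exposed through an environment variable (`MY_OIDC_TOKEN` and the wrapper script are hypothetical illustrations, not part of the upstream change):

```javascript
const { sigstore } = require('sigstore')

// Any object implementing getToken: () => Promise<string> can serve as a
// provider; here we read a token from a (hypothetical) environment variable.
const identityProvider = {
  getToken: async () => {
    const token = process.env.MY_OIDC_TOKEN // assumed token source
    if (!token) {
      throw new Error('no OIDC token available in the environment')
    }
    return token
  },
}

async function main () {
  // Per the docs above, identityProvider and identityToken are mutually
  // exclusive; the supplied provider is consulted when the bundle is signed.
  const bundle = await sigstore.sign(Buffer.from('hello world'), {
    identityProvider,
  })
  console.log(JSON.stringify(bundle, null, 2))
}

main().catch((err) => {
  console.error(err)
  process.exit(1)
})
```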
### verify(bundle[, payload][, options]) diff --git a/node_modules/npm/node_modules/sigstore/dist/config.d.ts b/node_modules/npm/node_modules/sigstore/dist/config.d.ts index 46be669..89f4203 100644 --- a/node_modules/npm/node_modules/sigstore/dist/config.d.ts +++ b/node_modules/npm/node_modules/sigstore/dist/config.d.ts @@ -27,6 +27,7 @@ export type TUFOptions = { tufCachePath?: string; } & FetchOptions; export type SignOptions = { + identityProvider?: Provider; tlogUpload?: boolean; } & CAOptions & TLogOptions & TSAOptions & FetchOptions & IdentityProviderOptions; export type VerifyOptions = { @@ -38,6 +39,9 @@ export type VerifyOptions = { certificateOIDs?: Record; keySelector?: KeySelector; } & TLogOptions & TUFOptions; +export type CreateVerifierOptions = { + keySelector?: KeySelector; +} & TUFOptions; export declare const DEFAULT_FULCIO_URL = "https://fulcio.sigstore.dev"; export declare const DEFAULT_REKOR_URL = "https://rekor.sigstore.dev"; export declare const DEFAULT_RETRY: Retry; diff --git a/node_modules/npm/node_modules/sigstore/dist/external/rekor.d.ts b/node_modules/npm/node_modules/sigstore/dist/external/rekor.d.ts index fde9a50..6729ad3 100644 --- a/node_modules/npm/node_modules/sigstore/dist/external/rekor.d.ts +++ b/node_modules/npm/node_modules/sigstore/dist/external/rekor.d.ts @@ -1,17 +1,12 @@ -import { Entry, EntryKind } from '../tlog'; +import type { LogEntry, ProposedDSSEEntry, ProposedEntry, ProposedHashedRekordEntry, ProposedIntotoEntry, InclusionProof as RekorInclusionProof, SearchIndex, SearchLogQuery } from '@sigstore/rekor-types'; import type { FetchOptions } from '../types/fetch'; +export type { ProposedDSSEEntry, ProposedEntry, ProposedHashedRekordEntry, ProposedIntotoEntry, RekorInclusionProof, SearchIndex, SearchLogQuery, }; +export type Entry = { + uuid: string; +} & LogEntry['x']; export type RekorOptions = { baseURL: string; } & FetchOptions; -export interface SearchIndex { - email?: string; - hash?: string; -} -export interface SearchLogQuery { - entries?: EntryKind[]; - entryUUIDs?: string[]; - logIndexes?: number[]; -} /** * Rekor API client. */ @@ -21,10 +16,10 @@ export declare class Rekor { constructor(options: RekorOptions); /** * Create a new entry in the Rekor log. - * @param propsedEntry {EntryKind} Data to create a new entry + * @param propsedEntry {ProposedEntry} Data to create a new entry * @returns {Promise} The created entry */ - createEntry(propsedEntry: EntryKind): Promise; + createEntry(propsedEntry: ProposedEntry): Promise; /** * Get an entry from the Rekor log. * @param uuid {string} The UUID of the entry to retrieve diff --git a/node_modules/npm/node_modules/sigstore/dist/external/rekor.js b/node_modules/npm/node_modules/sigstore/dist/external/rekor.js index 80650ce..b6bbeb6 100644 --- a/node_modules/npm/node_modules/sigstore/dist/external/rekor.js +++ b/node_modules/npm/node_modules/sigstore/dist/external/rekor.js @@ -39,7 +39,7 @@ class Rekor { } /** * Create a new entry in the Rekor log. 
- * @param propsedEntry {EntryKind} Data to create a new entry + * @param propsedEntry {ProposedEntry} Data to create a new entry * @returns {Promise} The created entry */ async createEntry(propsedEntry) { @@ -107,7 +107,7 @@ function entryFromResponse(data) { throw new Error('Received multiple entries in Rekor response'); } // Grab UUID and entry data from the response - const [uuid, entry] = Object.entries(data)[0]; + const [uuid, entry] = entries[0]; return { ...entry, uuid, diff --git a/node_modules/npm/node_modules/sigstore/dist/index.d.ts b/node_modules/npm/node_modules/sigstore/dist/index.d.ts index fb23e5e..dbac064 100644 --- a/node_modules/npm/node_modules/sigstore/dist/index.d.ts +++ b/node_modules/npm/node_modules/sigstore/dist/index.d.ts @@ -1 +1,2 @@ +export type { Provider as IdentityProvider } from './identity'; export * as sigstore from './sigstore'; diff --git a/node_modules/npm/node_modules/sigstore/dist/index.js b/node_modules/npm/node_modules/sigstore/dist/index.js index 502155e..126fce5 100644 --- a/node_modules/npm/node_modules/sigstore/dist/index.js +++ b/node_modules/npm/node_modules/sigstore/dist/index.js @@ -24,19 +24,4 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); exports.sigstore = void 0; -/* -Copyright 2022 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ exports.sigstore = __importStar(require("./sigstore")); diff --git a/node_modules/npm/node_modules/sigstore/dist/merkle/digest.d.ts b/node_modules/npm/node_modules/sigstore/dist/merkle/digest.d.ts deleted file mode 100644 index 5c69221..0000000 --- a/node_modules/npm/node_modules/sigstore/dist/merkle/digest.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -/// -export declare class Hasher { - private algorithm; - constructor(algorithm?: string); - size(): number; - hashLeaf(leaf: Buffer): Buffer; - hashChildren(l: Buffer, r: Buffer): Buffer; -} diff --git a/node_modules/npm/node_modules/sigstore/dist/merkle/digest.js b/node_modules/npm/node_modules/sigstore/dist/merkle/digest.js deleted file mode 100644 index 5b7ff04..0000000 --- a/node_modules/npm/node_modules/sigstore/dist/merkle/digest.js +++ /dev/null @@ -1,48 +0,0 @@ -"use strict"; -/* -Copyright 2022 GitHub, Inc - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Hasher = void 0; -const crypto_1 = __importDefault(require("crypto")); -const RFC6962LeafHashPrefix = Buffer.from([0x00]); -const RFC6962NodeHashPrefix = Buffer.from([0x01]); -// Implements Merkle Tree Hash logic according to RFC6962. -// https://datatracker.ietf.org/doc/html/rfc6962#section-2 -class Hasher { - constructor(algorithm = 'sha256') { - this.algorithm = algorithm; - } - size() { - return crypto_1.default.createHash(this.algorithm).digest().length; - } - hashLeaf(leaf) { - const hasher = crypto_1.default.createHash(this.algorithm); - hasher.update(RFC6962LeafHashPrefix); - hasher.update(leaf); - return hasher.digest(); - } - hashChildren(l, r) { - const hasher = crypto_1.default.createHash(this.algorithm); - hasher.update(RFC6962NodeHashPrefix); - hasher.update(l); - hasher.update(r); - return hasher.digest(); - } -} -exports.Hasher = Hasher; diff --git a/node_modules/npm/node_modules/sigstore/dist/merkle/index.d.ts b/node_modules/npm/node_modules/sigstore/dist/merkle/index.d.ts deleted file mode 100644 index d8ffe7c..0000000 --- a/node_modules/npm/node_modules/sigstore/dist/merkle/index.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export { Hasher } from './digest'; -export { verifyInclusion } from './verify'; diff --git a/node_modules/npm/node_modules/sigstore/dist/merkle/verify.d.ts b/node_modules/npm/node_modules/sigstore/dist/merkle/verify.d.ts deleted file mode 100644 index b1b28b7..0000000 --- a/node_modules/npm/node_modules/sigstore/dist/merkle/verify.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -/// -import { Hasher } from './digest'; -export declare function verifyInclusion(hasher: Hasher, index: bigint, size: bigint, leafHash: Buffer, proof: Buffer[], root: Buffer): boolean; diff --git a/node_modules/npm/node_modules/sigstore/dist/merkle/verify.js b/node_modules/npm/node_modules/sigstore/dist/merkle/verify.js deleted file mode 100644 index 3455434..0000000 --- a/node_modules/npm/node_modules/sigstore/dist/merkle/verify.js +++ /dev/null @@ -1,78 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyInclusion = void 0; -/* -Copyright 2022 GitHub, Inc - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ -// Implementation largely copied from -// https://github.com/transparency-dev/merkle/blob/main/proof/verify.go#L46 -// Verifies the correctness of the inclusion proof for the given leaf hash -// and index relative to the tree of the given size and root hash. -function verifyInclusion(hasher, index, size, leafHash, proof, root) { - const calcroot = rootFromInclusionProof(hasher, index, size, leafHash, proof); - return calcroot.equals(root); -} -exports.verifyInclusion = verifyInclusion; -// Calculates the expected root hash for a tree of the given size, provided a -// leaf index and hash with corresponding inclusion proof. 
-function rootFromInclusionProof(hasher, index, size, leafHash, proof) { - if (index >= size) { - throw new Error('index exceeds size of tree'); - } - if (leafHash.length !== hasher.size()) { - throw new Error('leafHash has unexpected size'); - } - const { inner, border } = decompInclProof(index, size); - if (proof.length != inner + border) { - throw new Error('invalid proof length'); - } - let hash = chainInner(hasher, leafHash, proof.slice(0, inner), index); - hash = chainBorderRight(hasher, hash, proof.slice(inner)); - return hash; -} -// Breaks down inclusion proof for a leaf at the specified index in a tree of -// the specified size. The split point is where paths to the index leaf and -// the (size - 1) leaf diverge. Returns lengths of the bottom and upper proof -// parts. -function decompInclProof(index, size) { - const inner = innerProofSize(index, size); - const border = onesCount(index >> BigInt(inner)); - return { inner, border }; -} -// Computes a subtree hash for an node on or below the tree's right border. -// Assumes the provided proof hashes are ordered from lower to higher levels -// and seed is the initial hash of the node specified by the index. -function chainInner(hasher, seed, proof, index) { - return proof.reduce((acc, h, i) => { - if ((index >> BigInt(i)) & BigInt(1)) { - return hasher.hashChildren(h, acc); - } - else { - return hasher.hashChildren(acc, h); - } - }, seed); -} -// Computes a subtree hash for nodes along the tree's right border. -function chainBorderRight(hasher, seed, proof) { - return proof.reduce((acc, h) => hasher.hashChildren(h, acc), seed); -} -function innerProofSize(index, size) { - return (index ^ (size - BigInt(1))).toString(2).length; -} -// Counts the number of ones in the binary representation of the given number. 
-// https://en.wikipedia.org/wiki/Hamming_weight -function onesCount(x) { - return x.toString(2).split('1').length - 1; -} diff --git a/node_modules/npm/node_modules/sigstore/dist/sigstore-utils.js b/node_modules/npm/node_modules/sigstore/dist/sigstore-utils.js index 1341052..dc75692 100644 --- a/node_modules/npm/node_modules/sigstore/dist/sigstore-utils.js +++ b/node_modules/npm/node_modules/sigstore/dist/sigstore-utils.js @@ -75,6 +75,6 @@ async function createRekorEntry(dsseEnvelope, publicKey, options = {}) { signature: sigMaterial, tlogEntry: entry, }); - return sigstore.Bundle.toJSON(bundle); + return sigstore.bundleToJSON(bundle); } exports.createRekorEntry = createRekorEntry; diff --git a/node_modules/npm/node_modules/sigstore/dist/sigstore.d.ts b/node_modules/npm/node_modules/sigstore/dist/sigstore.d.ts index d4fba2b..1da5e8e 100644 --- a/node_modules/npm/node_modules/sigstore/dist/sigstore.d.ts +++ b/node_modules/npm/node_modules/sigstore/dist/sigstore.d.ts @@ -1,18 +1,22 @@ /// +import * as tuf from '@sigstore/tuf'; import * as config from './config'; -import * as tuf from './tuf'; import * as sigstore from './types/sigstore'; export declare function sign(payload: Buffer, options?: config.SignOptions): Promise; export declare function attest(payload: Buffer, payloadType: string, options?: config.SignOptions): Promise; export declare function verify(bundle: sigstore.SerializedBundle, payload?: Buffer, options?: config.VerifyOptions): Promise; +export interface BundleVerifier { + verify(bundle: sigstore.SerializedBundle): void; +} +export declare function createVerifier(options: config.CreateVerifierOptions): Promise; declare const tufUtils: { client: (options?: config.TUFOptions) => Promise; getTarget: (path: string, options?: config.TUFOptions) => Promise; }; +export type { TUF } from '@sigstore/tuf'; export type { SignOptions, VerifyOptions } from './config'; export { InternalError, PolicyError, ValidationError, VerificationError, } from './error'; export * as utils from './sigstore-utils'; -export type { TUF } from './tuf'; export type { SerializedBundle as Bundle, SerializedEnvelope as Envelope, } from './types/sigstore'; export { tufUtils as tuf }; export declare const DEFAULT_FULCIO_URL = "https://fulcio.sigstore.dev"; diff --git a/node_modules/npm/node_modules/sigstore/dist/sigstore.js b/node_modules/npm/node_modules/sigstore/dist/sigstore.js index 8d245e1..dca476d 100644 --- a/node_modules/npm/node_modules/sigstore/dist/sigstore.js +++ b/node_modules/npm/node_modules/sigstore/dist/sigstore.js @@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = exports.tuf = exports.utils = exports.VerificationError = exports.ValidationError = exports.PolicyError = exports.InternalError = exports.verify = exports.attest = exports.sign = void 0; +exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = exports.tuf = exports.utils = exports.VerificationError = exports.ValidationError = exports.PolicyError = exports.InternalError = exports.createVerifier = exports.verify = exports.attest = exports.sign = void 0; /* Copyright 2023 The Sigstore Authors. @@ -39,9 +39,9 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ +const tuf = __importStar(require("@sigstore/tuf")); const config = __importStar(require("./config")); const sign_1 = require("./sign"); -const tuf = __importStar(require("./tuf")); const sigstore = __importStar(require("./types/sigstore")); const verify_1 = require("./verify"); async function sign(payload, options = {}) { @@ -51,11 +51,13 @@ async function sign(payload, options = {}) { const signer = new sign_1.Signer({ ca, tlog, - identityProviders: idps, + identityProviders: options.identityProvider + ? [options.identityProvider] + : idps, tlogUpload: options.tlogUpload, }); const bundle = await signer.signBlob(payload); - return sigstore.Bundle.toJSON(bundle); + return sigstore.bundleToJSON(bundle); } exports.sign = sign; async function attest(payload, payloadType, options = {}) { @@ -67,11 +69,13 @@ async function attest(payload, payloadType, options = {}) { ca, tlog, tsa, - identityProviders: idps, + identityProviders: options.identityProvider + ? [options.identityProvider] + : idps, tlogUpload: options.tlogUpload, }); const bundle = await signer.signAttestation(payload, payloadType); - return sigstore.Bundle.toJSON(bundle); + return sigstore.bundleToJSON(bundle); } exports.attest = attest; async function verify(bundle, payload, options = {}) { @@ -88,22 +92,47 @@ async function verify(bundle, payload, options = {}) { return verifier.verify(deserializedBundle, opts, payload); } exports.verify = verify; +async function createVerifier(options) { + const trustedRoot = await tuf.getTrustedRoot({ + mirrorURL: options.tufMirrorURL, + rootPath: options.tufRootPath, + cachePath: options.tufCachePath, + retry: options.retry ?? config.DEFAULT_RETRY, + timeout: options.timeout ?? config.DEFAULT_TIMEOUT, + }); + const verifier = new verify_1.Verifier(trustedRoot, options.keySelector); + const verifyOpts = config.artifactVerificationOptions(options); + return { + verify: (bundle) => { + const deserializedBundle = sigstore.bundleFromJSON(bundle); + return verifier.verify(deserializedBundle, verifyOpts); + }, + }; +} +exports.createVerifier = createVerifier; const tufUtils = { client: (options = {}) => { - const t = new tuf.TUFClient({ + return tuf.initTUF({ mirrorURL: options.tufMirrorURL, rootPath: options.tufRootPath, cachePath: options.tufCachePath, - retry: options.retry ?? config.DEFAULT_RETRY, - timeout: options.timeout ?? config.DEFAULT_TIMEOUT, + retry: options.retry, + timeout: options.timeout, }); - return t.refresh().then(() => t); }, /* * @deprecated Use tufUtils.client instead. 
*/ getTarget: (path, options = {}) => { - return tufUtils.client(options).then((t) => t.getTarget(path)); + return tuf + .initTUF({ + mirrorURL: options.tufMirrorURL, + rootPath: options.tufRootPath, + cachePath: options.tufCachePath, + retry: options.retry, + timeout: options.timeout, + }) + .then((t) => t.getTarget(path)); }, }; exports.tuf = tufUtils; diff --git a/node_modules/npm/node_modules/sigstore/dist/tlog/format.d.ts b/node_modules/npm/node_modules/sigstore/dist/tlog/format.d.ts index 92251b3..8a00f54 100644 --- a/node_modules/npm/node_modules/sigstore/dist/tlog/format.d.ts +++ b/node_modules/npm/node_modules/sigstore/dist/tlog/format.d.ts @@ -1,6 +1,7 @@ /// import { SignatureMaterial } from '../types/signature'; import { Envelope } from '../types/sigstore'; -import { HashedRekordKind, IntotoKind } from './types'; -export declare function toProposedHashedRekordEntry(digest: Buffer, signature: SignatureMaterial): HashedRekordKind; -export declare function toProposedIntotoEntry(envelope: Envelope, signature: SignatureMaterial, apiVersion?: string): IntotoKind; +import type { ProposedDSSEEntry, ProposedHashedRekordEntry, ProposedIntotoEntry } from '../external/rekor'; +export declare function toProposedDSSEEntry(envelope: Envelope, signature: SignatureMaterial, apiVersion?: string): ProposedDSSEEntry; +export declare function toProposedHashedRekordEntry(digest: Buffer, signature: SignatureMaterial): ProposedHashedRekordEntry; +export declare function toProposedIntotoEntry(envelope: Envelope, signature: SignatureMaterial, apiVersion?: string): ProposedIntotoEntry; diff --git a/node_modules/npm/node_modules/sigstore/dist/tlog/format.js b/node_modules/npm/node_modules/sigstore/dist/tlog/format.js index 6707709..b0eae95 100644 --- a/node_modules/npm/node_modules/sigstore/dist/tlog/format.js +++ b/node_modules/npm/node_modules/sigstore/dist/tlog/format.js @@ -1,10 +1,22 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.toProposedIntotoEntry = exports.toProposedHashedRekordEntry = void 0; +exports.toProposedIntotoEntry = exports.toProposedHashedRekordEntry = exports.toProposedDSSEEntry = void 0; +const sigstore_1 = require("../types/sigstore"); const util_1 = require("../util"); -const types_1 = require("./types"); +const DEFAULT_DSSE_API_VERSION = '0.0.1'; const DEFAULT_HASHEDREKORD_API_VERSION = '0.0.1'; const DEFAULT_INTOTO_API_VERSION = '0.0.2'; +// Returns a properly formatted Rekor "dsse" entry for the given DSSE +// envelope and signature +function toProposedDSSEEntry(envelope, signature, apiVersion = DEFAULT_DSSE_API_VERSION) { + switch (apiVersion) { + case '0.0.1': + return toProposedDSSEV001Entry(envelope, signature); + default: + throw new Error(`Unsupported dsse kind API version: ${apiVersion}`); + } +} +exports.toProposedDSSEEntry = toProposedDSSEEntry; // Returns a properly formatted Rekor "hashedrekord" entry for the given digest // and signature function toProposedHashedRekordEntry(digest, signature) { @@ -13,7 +25,7 @@ function toProposedHashedRekordEntry(digest, signature) { const b64Key = util_1.encoding.base64Encode(toPublicKey(signature)); return { apiVersion: DEFAULT_HASHEDREKORD_API_VERSION, - kind: types_1.HASHEDREKORD_KIND, + kind: 'hashedrekord', spec: { data: { hash: { @@ -42,11 +54,23 @@ function toProposedIntotoEntry(envelope, signature, apiVersion = DEFAULT_INTOTO_ } } exports.toProposedIntotoEntry = toProposedIntotoEntry; +function toProposedDSSEV001Entry(envelope, signature) { + return { + apiVersion: '0.0.1', + 
kind: 'dsse', + spec: { + proposedContent: { + envelope: JSON.stringify(sigstore_1.Envelope.toJSON(envelope)), + verifiers: [util_1.encoding.base64Encode(toPublicKey(signature))], + }, + }, + }; +} function toProposedIntotoV002Entry(envelope, signature) { // Calculate the value for the payloadHash field in the Rekor entry const payloadHash = util_1.crypto.hash(envelope.payload).toString('hex'); // Calculate the value for the hash field in the Rekor entry - const envelopeHash = calculateDSSEHash(envelope); + const envelopeHash = calculateDSSEHash(envelope, signature); // Collect values for re-creating the DSSE envelope. // Double-encode payload and signature cause that's what Rekor expects const payload = util_1.encoding.base64Encode(envelope.payload.toString('base64')); @@ -56,7 +80,7 @@ function toProposedIntotoV002Entry(envelope, signature) { // Create the envelope portion of the entry. Note the inclusion of the // publicKey in the signature struct is not a standard part of a DSSE // envelope, but is required by Rekor. - const dsse = { + const dsseEnv = { payloadType: envelope.payloadType, payload: payload, signatures: [{ sig, publicKey }], @@ -65,14 +89,14 @@ function toProposedIntotoV002Entry(envelope, signature) { // need to do the same here so that we can properly recreate the entry for // verification. if (keyid.length > 0) { - dsse.signatures[0].keyid = keyid; + dsseEnv.signatures[0].keyid = keyid; } return { apiVersion: '0.0.2', - kind: types_1.INTOTO_KIND, + kind: 'intoto', spec: { content: { - envelope: dsse, + envelope: dsseEnv, hash: { algorithm: 'sha256', value: envelopeHash }, payloadHash: { algorithm: 'sha256', value: payloadHash }, }, @@ -86,17 +110,22 @@ function toProposedIntotoV002Entry(envelope, signature) { // * signature is base64 encoded (only the first signature is used) // * keyid is included ONLY if it is NOT an empty string // * The resulting JSON is canonicalized and hashed to a hex string -function calculateDSSEHash(envelope) { - const dsse = { +function calculateDSSEHash(envelope, signature) { + const dsseEnv = { payloadType: envelope.payloadType, payload: envelope.payload.toString('base64'), - signatures: [{ sig: envelope.signatures[0].sig.toString('base64') }], + signatures: [ + { + sig: envelope.signatures[0].sig.toString('base64'), + publicKey: toPublicKey(signature), + }, + ], }; // If the keyid is an empty string, Rekor seems to remove it altogether. 
if (envelope.signatures[0].keyid.length > 0) { - dsse.signatures[0].keyid = envelope.signatures[0].keyid; + dsseEnv.signatures[0].keyid = envelope.signatures[0].keyid; } - return util_1.crypto.hash(util_1.json.canonicalize(dsse)).toString('hex'); + return util_1.crypto.hash(util_1.json.canonicalize(dsseEnv)).toString('hex'); } function toPublicKey(signature) { return signature.certificates diff --git a/node_modules/npm/node_modules/sigstore/dist/tlog/index.d.ts b/node_modules/npm/node_modules/sigstore/dist/tlog/index.d.ts index 7ef070c..6bb7d42 100644 --- a/node_modules/npm/node_modules/sigstore/dist/tlog/index.d.ts +++ b/node_modules/npm/node_modules/sigstore/dist/tlog/index.d.ts @@ -1,12 +1,11 @@ /// import { SignatureMaterial } from '../types/signature'; import * as sigstore from '../types/sigstore'; -import { Entry } from './types'; +import type { Entry } from '../external/rekor'; import type { FetchOptions } from '../types/fetch'; interface CreateEntryOptions { fetchOnConflict?: boolean; } -export { Entry, EntryKind, HashedRekordKind } from './types'; export interface TLog { createMessageSignatureEntry: (digest: Buffer, sigMaterial: SignatureMaterial) => Promise; createDSSEEntry: (envelope: sigstore.Envelope, sigMaterial: SignatureMaterial, options?: CreateEntryOptions) => Promise; @@ -21,3 +20,4 @@ export declare class TLogClient implements TLog { createDSSEEntry(envelope: sigstore.Envelope, sigMaterial: SignatureMaterial, options?: CreateEntryOptions): Promise; private createEntry; } +export {}; diff --git a/node_modules/npm/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.d.ts b/node_modules/npm/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.d.ts deleted file mode 100644 index bfe4e83..0000000 --- a/node_modules/npm/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.d.ts +++ /dev/null @@ -1,50 +0,0 @@ -/** - * This file was automatically generated by json-schema-to-typescript. - * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, - * and run json-schema-to-typescript to regenerate this file. 
- */ -/** - * Schema for Rekord objects - */ -export type RekorSchema = HashedRekorV001Schema; -/** - * Schema for Hashed Rekord object - */ -export interface HashedRekorV001Schema { - /** - * Information about the detached signature associated with the entry - */ - signature: { - /** - * Specifies the content of the signature inline within the document - */ - content?: string; - /** - * The public key that can verify the signature; this can also be an X509 code signing certificate that contains the raw public key information - */ - publicKey?: { - /** - * Specifies the content of the public key or code signing certificate inline within the document - */ - content?: string; - }; - }; - /** - * Information about the content associated with the entry - */ - data: { - /** - * Specifies the hash algorithm and value for the content - */ - hash?: { - /** - * The hashing function used to compute the hash value - */ - algorithm: "sha256"; - /** - * The hash value for the content - */ - value: string; - }; - }; -} diff --git a/node_modules/npm/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.js b/node_modules/npm/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.js deleted file mode 100644 index 61923a6..0000000 --- a/node_modules/npm/node_modules/sigstore/dist/tlog/types/__generated__/hashedrekord.js +++ /dev/null @@ -1,8 +0,0 @@ -"use strict"; -/* eslint-disable */ -/** - * This file was automatically generated by json-schema-to-typescript. - * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, - * and run json-schema-to-typescript to regenerate this file. - */ -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/npm/node_modules/sigstore/dist/tlog/types/__generated__/intoto.d.ts b/node_modules/npm/node_modules/sigstore/dist/tlog/types/__generated__/intoto.d.ts deleted file mode 100644 index c60c7e0..0000000 --- a/node_modules/npm/node_modules/sigstore/dist/tlog/types/__generated__/intoto.d.ts +++ /dev/null @@ -1,131 +0,0 @@ -/** - * This file was automatically generated by json-schema-to-typescript. - * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, - * and run json-schema-to-typescript to regenerate this file. 
- */ -/** - * Intoto for Rekord objects - */ -export type IntotoSchema = IntotoV001Schema | IntotoV002Schema; -/** - * Schema for intoto object - */ -export interface IntotoV001Schema { - content: { - /** - * envelope - */ - envelope?: string; - /** - * Specifies the hash algorithm and value encompassing the entire signed envelope - */ - hash?: { - /** - * The hashing function used to compute the hash value - */ - algorithm: "sha256"; - /** - * The hash value for the archive - */ - value: string; - }; - /** - * Specifies the hash algorithm and value covering the payload within the DSSE envelope - */ - payloadHash?: { - /** - * The hashing function used to compute the hash value - */ - algorithm: "sha256"; - /** - * The hash value for the envelope's payload - */ - value: string; - }; - }; - /** - * The public key that can verify the signature - */ - publicKey: string; -} -/** - * Schema for intoto object - */ -export interface IntotoV002Schema { - content: { - /** - * dsse envelope - */ - envelope?: { - /** - * payload of the envelope - */ - payload?: string; - /** - * type describing the payload - */ - payloadType: string; - /** - * collection of all signatures of the envelope's payload - * - * @minItems 1 - */ - signatures: [ - { - /** - * optional id of the key used to create the signature - */ - keyid?: string; - /** - * signature of the payload - */ - sig?: string; - /** - * public key that corresponds to this signature - */ - publicKey?: string; - }, - ...{ - /** - * optional id of the key used to create the signature - */ - keyid?: string; - /** - * signature of the payload - */ - sig?: string; - /** - * public key that corresponds to this signature - */ - publicKey?: string; - }[] - ]; - }; - /** - * Specifies the hash algorithm and value encompassing the entire signed envelope - */ - hash?: { - /** - * The hashing function used to compute the hash value - */ - algorithm: "sha256"; - /** - * The hash value for the archive - */ - value: string; - }; - /** - * Specifies the hash algorithm and value covering the payload within the DSSE envelope - */ - payloadHash?: { - /** - * The hashing function used to compute the hash value - */ - algorithm: "sha256"; - /** - * The hash value of the payload - */ - value: string; - }; - }; -} diff --git a/node_modules/npm/node_modules/sigstore/dist/tlog/types/__generated__/intoto.js b/node_modules/npm/node_modules/sigstore/dist/tlog/types/__generated__/intoto.js deleted file mode 100644 index 61923a6..0000000 --- a/node_modules/npm/node_modules/sigstore/dist/tlog/types/__generated__/intoto.js +++ /dev/null @@ -1,8 +0,0 @@ -"use strict"; -/* eslint-disable */ -/** - * This file was automatically generated by json-schema-to-typescript. - * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, - * and run json-schema-to-typescript to regenerate this file. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/npm/node_modules/sigstore/dist/tlog/types/index.d.ts b/node_modules/npm/node_modules/sigstore/dist/tlog/types/index.d.ts deleted file mode 100644 index 06be133..0000000 --- a/node_modules/npm/node_modules/sigstore/dist/tlog/types/index.d.ts +++ /dev/null @@ -1,38 +0,0 @@ -import { HashedRekorV001Schema } from './__generated__/hashedrekord'; -import { IntotoV001Schema, IntotoV002Schema } from './__generated__/intoto'; -export declare const INTOTO_KIND = "intoto"; -export declare const HASHEDREKORD_KIND = "hashedrekord"; -export type HashedRekordKind = { - apiVersion: '0.0.1'; - kind: typeof HASHEDREKORD_KIND; - spec: HashedRekorV001Schema; -}; -export type IntotoKind = { - apiVersion: '0.0.1'; - kind: typeof INTOTO_KIND; - spec: IntotoV001Schema; -} | { - apiVersion: '0.0.2'; - kind: typeof INTOTO_KIND; - spec: IntotoV002Schema; -}; -export type EntryKind = HashedRekordKind | IntotoKind; -export interface Entry { - uuid: string; - body: string; - integratedTime: number; - logID: string; - logIndex: number; - verification: EntryVerification; - attestation?: object; -} -export interface EntryVerification { - inclusionProof: InclusionProof; - signedEntryTimestamp: string; -} -export interface InclusionProof { - hashes: string[]; - logIndex: number; - rootHash: string; - treeSize: number; -} diff --git a/node_modules/npm/node_modules/sigstore/dist/tlog/types/index.js b/node_modules/npm/node_modules/sigstore/dist/tlog/types/index.js deleted file mode 100644 index d6394a9..0000000 --- a/node_modules/npm/node_modules/sigstore/dist/tlog/types/index.js +++ /dev/null @@ -1,5 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.HASHEDREKORD_KIND = exports.INTOTO_KIND = void 0; -exports.INTOTO_KIND = 'intoto'; -exports.HASHEDREKORD_KIND = 'hashedrekord'; diff --git a/node_modules/npm/node_modules/sigstore/dist/tlog/verify/body.js b/node_modules/npm/node_modules/sigstore/dist/tlog/verify/body.js index 086e068..5a265e5 100644 --- a/node_modules/npm/node_modules/sigstore/dist/tlog/verify/body.js +++ b/node_modules/npm/node_modules/sigstore/dist/tlog/verify/body.js @@ -28,6 +28,9 @@ function verifyTLogBody(entry, bundleContent) { throw new error_1.VerificationError(TLOG_MISMATCH_ERROR_MSG); } switch (body.kind) { + case 'dsse': + verifyDSSETLogBody(body, bundleContent); + break; case 'intoto': verifyIntotoTLogBody(body, bundleContent); break; @@ -45,6 +48,20 @@ function verifyTLogBody(entry, bundleContent) { } exports.verifyTLogBody = verifyTLogBody; // Compare the given intoto tlog entry to the given bundle +function verifyDSSETLogBody(tlogEntry, content) { + if (content?.$case !== 'dsseEnvelope') { + throw new error_1.VerificationError(`unsupported bundle content: ${content?.$case || 'unknown'}`); + } + const dsse = content.dsseEnvelope; + switch (tlogEntry.apiVersion) { + case '0.0.1': + verifyDSSE001TLogBody(tlogEntry, dsse); + break; + default: + throw new error_1.VerificationError(`unsupported dsse version: ${tlogEntry.apiVersion}`); + } +} +// Compare the given intoto tlog entry to the given bundle function verifyIntotoTLogBody(tlogEntry, content) { if (content?.$case !== 'dsseEnvelope') { throw new error_1.VerificationError(`unsupported bundle content: ${content?.$case || 'unknown'}`); @@ -72,6 +89,28 @@ function verifyHashedRekordTLogBody(tlogEntry, content) { throw new error_1.VerificationError(`unsupported hashedrekord version: ${tlogEntry.apiVersion}`); } } 
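The new 'dsse' case above dispatches on the entry's apiVersion; the v0.0.1 body check that follows compares the bundle's DSSE envelope against the tlog entry on two points: the signature sets must match exactly, and the SHA-256 digest of the DSSE payload must equal the payloadHash recorded in the entry. A minimal standalone sketch of that comparison (the envelope and entry shapes here are simplified stand-ins, not the library's exact types):

const crypto = require('crypto');

// Sketch of the dsse v0.0.1 body comparison. Assumes envelope.payload and
// each envelope.signatures[].sig are Buffers, and that entry.spec follows the
// Rekor dsse v0.0.1 schema (signatures/payloadHash fields); these shapes are
// illustrative only.
function dsseBodyMatches(envelope, entry) {
  // Base64-encode the bundle's signatures for comparison
  const dsseSigs = envelope.signatures.map((s) => s.sig.toString('base64'));
  const tlogSigs = (entry.spec.signatures || []).map((s) => s.signature);
  // Same number of signatures, and every bundle signature present in the entry
  if (dsseSigs.length !== tlogSigs.length) {
    return false;
  }
  if (!dsseSigs.every((sig) => tlogSigs.includes(sig))) {
    return false;
  }
  // The payload digest must match the digest recorded in the entry
  const payloadHash = crypto.createHash('sha256').update(envelope.payload).digest('hex');
  return payloadHash === entry.spec.payloadHash?.value;
}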
+// Compare the given dsse v0.0.1 tlog entry to the given DSSE envelope. +function verifyDSSE001TLogBody(tlogEntry, dsse) { + // Collect all of the signatures from the DSSE envelope + // Turns them into base64-encoded strings for comparison + const dsseSigs = dsse.signatures.map((signature) => signature.sig.toString('base64')); + // Collect all of the signatures from the tlog entry + const tlogSigs = tlogEntry.spec.signatures?.map((signature) => signature.signature); + // Ensure the bundle's DSSE and the tlog entry contain the same number of signatures + if (dsseSigs.length !== tlogSigs?.length) { + throw new error_1.VerificationError(TLOG_MISMATCH_ERROR_MSG); + } + // Ensure that every signature in the bundle's DSSE is present in the tlog entry + if (!dsseSigs.every((dsseSig) => tlogSigs.includes(dsseSig))) { + throw new error_1.VerificationError(TLOG_MISMATCH_ERROR_MSG); + } + // Ensure the digest of the bundle's DSSE payload matches the digest in the + // tlog entry + const dssePayloadHash = util_1.crypto.hash(dsse.payload).toString('hex'); + if (dssePayloadHash !== tlogEntry.spec.payloadHash?.value) { + throw new error_1.VerificationError(TLOG_MISMATCH_ERROR_MSG); + } +} // Compare the given intoto v0.0.2 tlog entry to the given DSSE envelope. function verifyIntoto002TLogBody(tlogEntry, dsse) { // Collect all of the signatures from the DSSE envelope diff --git a/node_modules/npm/node_modules/sigstore/dist/tlog/verify/index.d.ts b/node_modules/npm/node_modules/sigstore/dist/tlog/verify/index.d.ts index 8ab4276..4f96f82 100644 --- a/node_modules/npm/node_modules/sigstore/dist/tlog/verify/index.d.ts +++ b/node_modules/npm/node_modules/sigstore/dist/tlog/verify/index.d.ts @@ -1,2 +1,2 @@ import * as sigstore from '../../types/sigstore'; -export declare function verifyTLogEntries(bundle: sigstore.BundleWithVerificationMaterial, trustedRoot: sigstore.TrustedRoot, options: sigstore.ArtifactVerificationOptions_TlogOptions): void; +export declare function verifyTLogEntries(bundle: sigstore.Bundle, trustedRoot: sigstore.TrustedRoot, options: sigstore.ArtifactVerificationOptions_TlogOptions): void; diff --git a/node_modules/npm/node_modules/sigstore/dist/tlog/verify/index.js b/node_modules/npm/node_modules/sigstore/dist/tlog/verify/index.js index ad655b6..cbb9313 100644 --- a/node_modules/npm/node_modules/sigstore/dist/tlog/verify/index.js +++ b/node_modules/npm/node_modules/sigstore/dist/tlog/verify/index.js @@ -41,6 +41,7 @@ limitations under the License. 
*/ const error_1 = require("../../error"); const sigstore = __importStar(require("../../types/sigstore")); +const cert_1 = require("../../x509/cert"); const body_1 = require("./body"); const set_1 = require("./set"); // Verifies that the number of tlog entries that pass offline verification @@ -50,7 +51,7 @@ function verifyTLogEntries(bundle, trustedRoot, options) { throw new error_1.VerificationError('Online verification not implemented'); } // Extract the signing cert, if available - const signingCert = sigstore.signingCertificate(bundle); + const signingCert = signingCertificate(bundle); // Iterate over the tlog entries and verify each one const verifiedEntries = bundle.verificationMaterial.tlogEntries.filter((entry) => verifyTLogEntryOffline(entry, bundle.content, trustedRoot.tlogs, signingCert)); if (verifiedEntries.length < options.threshold) { @@ -73,3 +74,10 @@ function verifyTLogEntryOffline(entry, bundleContent, tlogs, signingCert) { (0, set_1.verifyTLogSET)(entry, tlogs) && verifyTLogIntegrationTime()); } +function signingCertificate(bundle) { + if (!sigstore.isBundleWithCertificateChain(bundle)) { + return undefined; + } + const signingCert = bundle.verificationMaterial.content.x509CertificateChain.certificates[0]; + return cert_1.x509Certificate.parse(signingCert.rawBytes); +} diff --git a/node_modules/npm/node_modules/sigstore/dist/tlog/verify/merkle.d.ts b/node_modules/npm/node_modules/sigstore/dist/tlog/verify/merkle.d.ts new file mode 100644 index 0000000..a2c4762 --- /dev/null +++ b/node_modules/npm/node_modules/sigstore/dist/tlog/verify/merkle.d.ts @@ -0,0 +1,2 @@ +import * as sigstore from '../../types/sigstore'; +export declare function verifyMerkleInclusion(entry: sigstore.TransparencyLogEntry): boolean; diff --git a/node_modules/npm/node_modules/sigstore/dist/tlog/verify/merkle.js b/node_modules/npm/node_modules/sigstore/dist/tlog/verify/merkle.js new file mode 100644 index 0000000..90609cb --- /dev/null +++ b/node_modules/npm/node_modules/sigstore/dist/tlog/verify/merkle.js @@ -0,0 +1,109 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.verifyMerkleInclusion = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ +const crypto_1 = __importDefault(require("crypto")); +const error_1 = require("../../error"); +const RFC6962_LEAF_HASH_PREFIX = Buffer.from([0x00]); +const RFC6962_NODE_HASH_PREFIX = Buffer.from([0x01]); +function verifyMerkleInclusion(entry) { + const inclusionProof = entry.inclusionProof; + if (!inclusionProof) { + throw new error_1.VerificationError('tlog entry has no inclusion proof'); + } + const logIndex = BigInt(inclusionProof.logIndex); + const treeSize = BigInt(inclusionProof.treeSize); + if (logIndex < 0n || logIndex >= treeSize) { + throw new error_1.VerificationError('invalid inclusion proof index'); + } + // Figure out which subset of hashes corresponds to the inner and border + // nodes + const { inner, border } = decompInclProof(logIndex, treeSize); + if (inclusionProof.hashes.length !== inner + border) { + throw new error_1.VerificationError('invalid inclusion proof length'); + } + const innerHashes = inclusionProof.hashes.slice(0, inner); + const borderHashes = inclusionProof.hashes.slice(inner); + // The entry's hash is the leaf hash + const leafHash = hashLeaf(entry.canonicalizedBody); + // Chain the hashes belonging to the inner and border portions + const calculatedHash = chainBorderRight(chainInner(leafHash, innerHashes, logIndex), borderHashes); + // Calculated hash should match the root hash in the inclusion proof + return bufferEqual(calculatedHash, inclusionProof.rootHash); +} +exports.verifyMerkleInclusion = verifyMerkleInclusion; +// Breaks down inclusion proof for a leaf at the specified index in a tree of +// the specified size. The split point is where paths to the index leaf and +// the (size - 1) leaf diverge. Returns lengths of the bottom and upper proof +// parts. +function decompInclProof(index, size) { + const inner = innerProofSize(index, size); + const border = onesCount(index >> BigInt(inner)); + return { inner, border }; +} +// Computes a subtree hash for a node on or below the tree's right border. +// Assumes the provided proof hashes are ordered from lower to higher levels +// and seed is the initial hash of the node specified by the index. +function chainInner(seed, hashes, index) { + return hashes.reduce((acc, h, i) => { + if ((index >> BigInt(i)) & BigInt(1)) { + return hashChildren(h, acc); + } + else { + return hashChildren(acc, h); + } + }, seed); +} +// Computes a subtree hash for nodes along the tree's right border. +function chainBorderRight(seed, hashes) { + return hashes.reduce((acc, h) => hashChildren(h, acc), seed); +} +function innerProofSize(index, size) { + return (index ^ (size - BigInt(1))).toString(2).length; +} +// Counts the number of ones in the binary representation of the given number. +// https://en.wikipedia.org/wiki/Hamming_weight +function onesCount(x) { + return x.toString(2).split('1').length - 1; +} +// Hashing logic according to RFC6962. 
+// https://datatracker.ietf.org/doc/html/rfc6962#section-2 +function hashChildren(left, right) { + const hasher = crypto_1.default.createHash('sha256'); + hasher.update(RFC6962_NODE_HASH_PREFIX); + hasher.update(left); + hasher.update(right); + return hasher.digest(); +} +function hashLeaf(leaf) { + const hasher = crypto_1.default.createHash('sha256'); + hasher.update(RFC6962_LEAF_HASH_PREFIX); + hasher.update(leaf); + return hasher.digest(); +} +function bufferEqual(a, b) { + try { + return crypto_1.default.timingSafeEqual(a, b); + } + catch { + /* istanbul ignore next */ + return false; + } +} diff --git a/node_modules/npm/node_modules/sigstore/dist/tuf/index.d.ts b/node_modules/npm/node_modules/sigstore/dist/tuf/index.d.ts deleted file mode 100644 index 12b6b7a..0000000 --- a/node_modules/npm/node_modules/sigstore/dist/tuf/index.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -import * as sigstore from '../types/sigstore'; -import type { FetchOptions } from '../types/fetch'; -export type TUFOptions = { - cachePath?: string; - mirrorURL?: string; - rootPath?: string; -} & FetchOptions; -export interface TUF { - getTarget(targetName: string): Promise; -} -export declare function getTrustedRoot(options?: TUFOptions): Promise; -export declare class TUFClient implements TUF { - private updater; - constructor(options: TUFOptions); - refresh(): Promise; - getTarget(targetName: string): Promise; -} diff --git a/node_modules/npm/node_modules/sigstore/dist/tuf/target.d.ts b/node_modules/npm/node_modules/sigstore/dist/tuf/target.d.ts deleted file mode 100644 index a00af45..0000000 --- a/node_modules/npm/node_modules/sigstore/dist/tuf/target.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import { Updater } from 'tuf-js'; -export declare function readTarget(tuf: Updater, targetPath: string): Promise; diff --git a/node_modules/npm/node_modules/sigstore/dist/types/sigstore/index.d.ts b/node_modules/npm/node_modules/sigstore/dist/types/sigstore/index.d.ts index 1eeaac2..2be598d 100644 --- a/node_modules/npm/node_modules/sigstore/dist/types/sigstore/index.d.ts +++ b/node_modules/npm/node_modules/sigstore/dist/types/sigstore/index.d.ts @@ -1,24 +1,24 @@ /// -import { ArtifactVerificationOptions, Bundle, Envelope, TransparencyLogEntry, VerificationMaterial } from '@sigstore/protobuf-specs'; -import { x509Certificate } from '../../x509/cert'; -import { WithRequired } from '../utility'; +import { SignatureMaterial } from '../signature'; import { ValidBundle } from './validate'; -import type { Entry } from '../../tlog'; -import type { SignatureMaterial } from '../signature'; -export * from '@sigstore/protobuf-specs'; -export * from './serialized'; -export * from './validate'; +import type { ArtifactVerificationOptions, Envelope, TransparencyLogEntry, VerificationMaterial } from '@sigstore/protobuf-specs'; +import type { Entry } from '../../external/rekor'; +import type { WithRequired } from '../utility'; +import type { SerializedBundle } from './serialized'; +export { Envelope, HashAlgorithm, PublicKeyDetails, SubjectAlternativeNameType, } from '@sigstore/protobuf-specs'; +export type { ArtifactVerificationOptions, ArtifactVerificationOptions_CtlogOptions, ArtifactVerificationOptions_TlogOptions, CertificateAuthority, CertificateIdentities, CertificateIdentity, MessageSignature, ObjectIdentifierValuePair, PublicKey, PublicKeyIdentifier, RFC3161SignedTimestamp, Signature, SubjectAlternativeName, TimestampVerificationData, TransparencyLogEntry, TransparencyLogInstance, TrustedRoot, X509Certificate, X509CertificateChain, } from 
'@sigstore/protobuf-specs'; +export type { SerializedBundle, SerializedEnvelope } from './serialized'; +export type { ValidBundle as Bundle }; export declare const bundleFromJSON: (obj: any) => ValidBundle; -export type BundleWithVerificationMaterial = WithRequired; -export declare function isBundleWithVerificationMaterial(bundle: Bundle): bundle is BundleWithVerificationMaterial; -export type BundleWithCertificateChain = Bundle & { +export declare const bundleToJSON: (bundle: ValidBundle) => SerializedBundle; +export type BundleWithCertificateChain = ValidBundle & { verificationMaterial: VerificationMaterial & { content: Extract; }; }; -export declare function isBundleWithCertificateChain(bundle: Bundle): bundle is BundleWithCertificateChain; +export declare function isBundleWithCertificateChain(bundle: ValidBundle): bundle is BundleWithCertificateChain; export type RequiredArtifactVerificationOptions = WithRequired; export type CAArtifactVerificationOptions = WithRequired & { signers?: Extract { const bundle = protobuf_specs_1.Bundle.fromJSON(obj); @@ -44,16 +33,15 @@ const bundleFromJSON = (obj) => { return bundle; }; exports.bundleFromJSON = bundleFromJSON; +// eslint-disable-next-line @typescript-eslint/no-explicit-any +const bundleToJSON = (bundle) => { + return protobuf_specs_1.Bundle.toJSON(bundle); +}; +exports.bundleToJSON = bundleToJSON; const BUNDLE_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1'; -// Type guard for narrowing a Bundle to a BundleWithVerificationMaterial -function isBundleWithVerificationMaterial(bundle) { - return bundle.verificationMaterial !== undefined; -} -exports.isBundleWithVerificationMaterial = isBundleWithVerificationMaterial; // Type guard for narrowing a Bundle to a BundleWithCertificateChain function isBundleWithCertificateChain(bundle) { - return (isBundleWithVerificationMaterial(bundle) && - bundle.verificationMaterial.content !== undefined && + return (bundle.verificationMaterial.content !== undefined && bundle.verificationMaterial.content.$case === 'x509CertificateChain'); } exports.isBundleWithCertificateChain = isBundleWithCertificateChain; @@ -69,6 +57,9 @@ function isVerifiableTransparencyLogEntry(entry) { entry.kindVersion !== undefined); } exports.isVerifiableTransparencyLogEntry = isVerifiableTransparencyLogEntry; +// All of the following functions are used to construct a ValidBundle +// from various types of input. When this code moves into the +// @sigstore/sign package, these functions will be exported from there. function toDSSEBundle({ envelope, signature, tlogEntry, timestamp, }) { return { mediaType: BUNDLE_MEDIA_TYPE, @@ -103,8 +94,12 @@ function toMessageSignatureBundle({ digest, signature, tlogEntry, timestamp, }) } exports.toMessageSignatureBundle = toMessageSignatureBundle; function toTransparencyLogEntry(entry) { - const set = Buffer.from(entry.verification.signedEntryTimestamp, 'base64'); + const b64SET = entry.verification?.signedEntryTimestamp || ''; + const set = Buffer.from(b64SET, 'base64'); const logID = Buffer.from(entry.logID, 'hex'); + const proof = entry.verification?.inclusionProof + ? toInclusionProof(entry.verification.inclusionProof) + : undefined; // Parse entry body so we can extract the kind and version. 
const bodyJSON = util_1.encoding.base64Decode(entry.body); const entryBody = JSON.parse(bodyJSON); @@ -121,10 +116,21 @@ function toTransparencyLogEntry(entry) { kind: entryBody.kind, version: entryBody.apiVersion, }, - inclusionProof: undefined, + inclusionProof: proof, canonicalizedBody: Buffer.from(entry.body, 'base64'), }; } +function toInclusionProof(proof) { + return { + logIndex: proof.logIndex.toString(), + rootHash: Buffer.from(proof.rootHash, 'hex'), + treeSize: proof.treeSize.toString(), + checkpoint: { + envelope: proof.checkpoint, + }, + hashes: proof.hashes.map((h) => Buffer.from(h, 'hex')), + }; +} function toVerificationMaterial({ signature, tlogEntry, timestamp, }) { return { content: signature.certificates @@ -154,11 +160,3 @@ function toTimestampVerificationData(timestamp) { rfc3161Timestamps: [{ signedTimestamp: timestamp }], }; } -function signingCertificate(bundle) { - if (!isBundleWithCertificateChain(bundle)) { - return undefined; - } - const signingCert = bundle.verificationMaterial.content.x509CertificateChain.certificates[0]; - return cert_1.x509Certificate.parse(signingCert.rawBytes); -} -exports.signingCertificate = signingCertificate; diff --git a/node_modules/npm/node_modules/sigstore/dist/types/sigstore/serialized.d.ts b/node_modules/npm/node_modules/sigstore/dist/types/sigstore/serialized.d.ts index 31cb2ce..8ea3b5c 100644 --- a/node_modules/npm/node_modules/sigstore/dist/types/sigstore/serialized.d.ts +++ b/node_modules/npm/node_modules/sigstore/dist/types/sigstore/serialized.d.ts @@ -43,6 +43,7 @@ type SerializedDSSEEnvelope = { keyid: string; }[]; }; +export type { SerializedDSSEEnvelope as SerializedEnvelope }; export type SerializedBundle = { mediaType: string; verificationMaterial: (OneOf<{ @@ -62,13 +63,3 @@ export type SerializedBundle = { dsseEnvelope: SerializedDSSEEnvelope; messageSignature: SerializedMessageSignature; }>; -interface SerializedSignature { - sig: string; - keyid: string; -} -export type SerializedEnvelope = { - payload: string; - payloadType: string; - signatures: SerializedSignature[]; -}; -export {}; diff --git a/node_modules/npm/node_modules/sigstore/dist/types/sigstore/validate.d.ts b/node_modules/npm/node_modules/sigstore/dist/types/sigstore/validate.d.ts index 7d8316f..a6c33b3 100644 --- a/node_modules/npm/node_modules/sigstore/dist/types/sigstore/validate.d.ts +++ b/node_modules/npm/node_modules/sigstore/dist/types/sigstore/validate.d.ts @@ -1,5 +1,5 @@ -import { Bundle, MessageSignature, VerificationMaterial } from '@sigstore/protobuf-specs'; import { WithRequired } from '../utility'; +import type { Bundle, MessageSignature, VerificationMaterial } from '@sigstore/protobuf-specs'; export type ValidBundle = Bundle & { verificationMaterial: VerificationMaterial & { content: NonNullable; diff --git a/node_modules/npm/node_modules/sigstore/dist/types/sigstore/validate.js b/node_modules/npm/node_modules/sigstore/dist/types/sigstore/validate.js index efd873a..a19d8ad 100644 --- a/node_modules/npm/node_modules/sigstore/dist/types/sigstore/validate.js +++ b/node_modules/npm/node_modules/sigstore/dist/types/sigstore/validate.js @@ -1,6 +1,21 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.assertValidBundle = void 0; +/* +Copyright 2023 The Sigstore Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ const error_1 = require("../../error"); // Performs basic validation of a Sigstore bundle to ensure that all required // fields are populated. This is not a complete validation of the bundle, but diff --git a/node_modules/npm/node_modules/sigstore/dist/util/appdata.d.ts b/node_modules/npm/node_modules/sigstore/dist/util/appdata.d.ts deleted file mode 100644 index dcdaeef..0000000 --- a/node_modules/npm/node_modules/sigstore/dist/util/appdata.d.ts +++ /dev/null @@ -1 +0,0 @@ -export declare function appDataPath(name: string): string; diff --git a/node_modules/npm/node_modules/sigstore/dist/x509/asn1/dump.d.ts b/node_modules/npm/node_modules/sigstore/dist/util/asn1/dump.d.ts similarity index 100% rename from node_modules/npm/node_modules/sigstore/dist/x509/asn1/dump.d.ts rename to node_modules/npm/node_modules/sigstore/dist/util/asn1/dump.d.ts diff --git a/node_modules/npm/node_modules/sigstore/dist/x509/asn1/dump.js b/node_modules/npm/node_modules/sigstore/dist/util/asn1/dump.js similarity index 100% rename from node_modules/npm/node_modules/sigstore/dist/x509/asn1/dump.js rename to node_modules/npm/node_modules/sigstore/dist/util/asn1/dump.js diff --git a/node_modules/npm/node_modules/sigstore/dist/x509/asn1/error.d.ts b/node_modules/npm/node_modules/sigstore/dist/util/asn1/error.d.ts similarity index 100% rename from node_modules/npm/node_modules/sigstore/dist/x509/asn1/error.d.ts rename to node_modules/npm/node_modules/sigstore/dist/util/asn1/error.d.ts diff --git a/node_modules/npm/node_modules/sigstore/dist/x509/asn1/error.js b/node_modules/npm/node_modules/sigstore/dist/util/asn1/error.js similarity index 100% rename from node_modules/npm/node_modules/sigstore/dist/x509/asn1/error.js rename to node_modules/npm/node_modules/sigstore/dist/util/asn1/error.js diff --git a/node_modules/npm/node_modules/sigstore/dist/util/asn1/index.d.ts b/node_modules/npm/node_modules/sigstore/dist/util/asn1/index.d.ts new file mode 100644 index 0000000..da45453 --- /dev/null +++ b/node_modules/npm/node_modules/sigstore/dist/util/asn1/index.d.ts @@ -0,0 +1 @@ +export { ASN1Obj } from './obj'; diff --git a/node_modules/npm/node_modules/sigstore/dist/merkle/index.js b/node_modules/npm/node_modules/sigstore/dist/util/asn1/index.js similarity index 61% rename from node_modules/npm/node_modules/sigstore/dist/merkle/index.js rename to node_modules/npm/node_modules/sigstore/dist/util/asn1/index.js index 2dd39f1..348b2ea 100644 --- a/node_modules/npm/node_modules/sigstore/dist/merkle/index.js +++ b/node_modules/npm/node_modules/sigstore/dist/util/asn1/index.js @@ -1,6 +1,8 @@ "use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ASN1Obj = void 0; /* -Copyright 2022 GitHub, Inc +Copyright 2023 The Sigstore Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -14,9 +16,5 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.verifyInclusion = exports.Hasher = void 0; -var digest_1 = require("./digest"); -Object.defineProperty(exports, "Hasher", { enumerable: true, get: function () { return digest_1.Hasher; } }); -var verify_1 = require("./verify"); -Object.defineProperty(exports, "verifyInclusion", { enumerable: true, get: function () { return verify_1.verifyInclusion; } }); +var obj_1 = require("./obj"); +Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return obj_1.ASN1Obj; } }); diff --git a/node_modules/npm/node_modules/sigstore/dist/x509/asn1/length.d.ts b/node_modules/npm/node_modules/sigstore/dist/util/asn1/length.d.ts similarity index 76% rename from node_modules/npm/node_modules/sigstore/dist/x509/asn1/length.d.ts rename to node_modules/npm/node_modules/sigstore/dist/util/asn1/length.d.ts index b9c2a2f..97c7114 100644 --- a/node_modules/npm/node_modules/sigstore/dist/x509/asn1/length.d.ts +++ b/node_modules/npm/node_modules/sigstore/dist/util/asn1/length.d.ts @@ -1,4 +1,4 @@ /// -import { ByteStream } from '../../util/stream'; +import { ByteStream } from '../stream'; export declare function decodeLength(stream: ByteStream): number; export declare function encodeLength(len: number): Buffer; diff --git a/node_modules/npm/node_modules/sigstore/dist/x509/asn1/length.js b/node_modules/npm/node_modules/sigstore/dist/util/asn1/length.js similarity index 100% rename from node_modules/npm/node_modules/sigstore/dist/x509/asn1/length.js rename to node_modules/npm/node_modules/sigstore/dist/util/asn1/length.js diff --git a/node_modules/npm/node_modules/sigstore/dist/x509/asn1/obj.d.ts b/node_modules/npm/node_modules/sigstore/dist/util/asn1/obj.d.ts similarity index 100% rename from node_modules/npm/node_modules/sigstore/dist/x509/asn1/obj.d.ts rename to node_modules/npm/node_modules/sigstore/dist/util/asn1/obj.d.ts diff --git a/node_modules/npm/node_modules/sigstore/dist/x509/asn1/obj.js b/node_modules/npm/node_modules/sigstore/dist/util/asn1/obj.js similarity index 95% rename from node_modules/npm/node_modules/sigstore/dist/x509/asn1/obj.js rename to node_modules/npm/node_modules/sigstore/dist/util/asn1/obj.js index 712acf1..5f9ac9c 100644 --- a/node_modules/npm/node_modules/sigstore/dist/x509/asn1/obj.js +++ b/node_modules/npm/node_modules/sigstore/dist/util/asn1/obj.js @@ -16,7 +16,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ -const stream_1 = require("../../util/stream"); +const stream_1 = require("../stream"); const error_1 = require("./error"); const length_1 = require("./length"); const parse_1 = require("./parse"); @@ -132,7 +132,10 @@ function parseStream(stream) { function collectSubs(stream, len) { // Calculate end of object content const end = stream.position + len; - // Make sure there are enough bytes left in the stream + // Make sure there are enough bytes left in the stream. This should never + // happen, cause it'll get caught when the stream is sliced in parseStream. + // Leaving as an extra check just in case. 
+ /* istanbul ignore if */ if (end > stream.length) { throw new error_1.ASN1ParseError('invalid length'); } diff --git a/node_modules/npm/node_modules/sigstore/dist/x509/asn1/parse.d.ts b/node_modules/npm/node_modules/sigstore/dist/util/asn1/parse.d.ts similarity index 100% rename from node_modules/npm/node_modules/sigstore/dist/x509/asn1/parse.d.ts rename to node_modules/npm/node_modules/sigstore/dist/util/asn1/parse.d.ts diff --git a/node_modules/npm/node_modules/sigstore/dist/x509/asn1/parse.js b/node_modules/npm/node_modules/sigstore/dist/util/asn1/parse.js similarity index 100% rename from node_modules/npm/node_modules/sigstore/dist/x509/asn1/parse.js rename to node_modules/npm/node_modules/sigstore/dist/util/asn1/parse.js diff --git a/node_modules/npm/node_modules/sigstore/dist/x509/asn1/tag.d.ts b/node_modules/npm/node_modules/sigstore/dist/util/asn1/tag.d.ts similarity index 100% rename from node_modules/npm/node_modules/sigstore/dist/x509/asn1/tag.d.ts rename to node_modules/npm/node_modules/sigstore/dist/util/asn1/tag.d.ts diff --git a/node_modules/npm/node_modules/sigstore/dist/x509/asn1/tag.js b/node_modules/npm/node_modules/sigstore/dist/util/asn1/tag.js similarity index 100% rename from node_modules/npm/node_modules/sigstore/dist/x509/asn1/tag.js rename to node_modules/npm/node_modules/sigstore/dist/util/asn1/tag.js diff --git a/node_modules/npm/node_modules/sigstore/dist/util/index.d.ts b/node_modules/npm/node_modules/sigstore/dist/util/index.d.ts index 02e4ddc..f062a1c 100644 --- a/node_modules/npm/node_modules/sigstore/dist/util/index.d.ts +++ b/node_modules/npm/node_modules/sigstore/dist/util/index.d.ts @@ -1,4 +1,4 @@ -export * as appdata from './appdata'; +export * as asn1 from './asn1'; export * as crypto from './crypto'; export * as dsse from './dsse'; export * as encoding from './encoding'; diff --git a/node_modules/npm/node_modules/sigstore/dist/util/index.js b/node_modules/npm/node_modules/sigstore/dist/util/index.js index 74ef9c0..b7d6ce2 100644 --- a/node_modules/npm/node_modules/sigstore/dist/util/index.js +++ b/node_modules/npm/node_modules/sigstore/dist/util/index.js @@ -23,7 +23,7 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.ua = exports.promise = exports.pem = exports.oidc = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.appdata = void 0; +exports.ua = exports.promise = exports.pem = exports.oidc = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.asn1 = void 0; /* Copyright 2022 The Sigstore Authors. @@ -39,7 +39,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ -exports.appdata = __importStar(require("./appdata")); +exports.asn1 = __importStar(require("./asn1")); exports.crypto = __importStar(require("./crypto")); exports.dsse = __importStar(require("./dsse")); exports.encoding = __importStar(require("./encoding")); diff --git a/node_modules/npm/node_modules/sigstore/dist/util/stream.js b/node_modules/npm/node_modules/sigstore/dist/util/stream.js index d5c8236..b5c881b 100644 --- a/node_modules/npm/node_modules/sigstore/dist/util/stream.js +++ b/node_modules/npm/node_modules/sigstore/dist/util/stream.js @@ -112,5 +112,5 @@ class ByteStream { this.view = newView; } } -ByteStream.BLOCK_SIZE = 1024; exports.ByteStream = ByteStream; +ByteStream.BLOCK_SIZE = 1024; diff --git a/node_modules/npm/node_modules/sigstore/dist/verify.d.ts b/node_modules/npm/node_modules/sigstore/dist/verify.d.ts index 819d0da..850d0f3 100644 --- a/node_modules/npm/node_modules/sigstore/dist/verify.d.ts +++ b/node_modules/npm/node_modules/sigstore/dist/verify.d.ts @@ -5,7 +5,7 @@ export declare class Verifier { private trustedRoot; private keySelector; constructor(trustedRoot: sigstore.TrustedRoot, keySelector?: KeySelector); - verify(bundle: sigstore.ValidBundle, options: sigstore.RequiredArtifactVerificationOptions, data?: Buffer): void; + verify(bundle: sigstore.Bundle, options: sigstore.RequiredArtifactVerificationOptions, data?: Buffer): void; private verifyArtifactSignature; private verifySigningCertificate; private verifyTLogEntries; diff --git a/node_modules/npm/node_modules/sigstore/dist/x509/cert.d.ts b/node_modules/npm/node_modules/sigstore/dist/x509/cert.d.ts index 6f0f2f3..216dbd3 100644 --- a/node_modules/npm/node_modules/sigstore/dist/x509/cert.d.ts +++ b/node_modules/npm/node_modules/sigstore/dist/x509/cert.d.ts @@ -1,6 +1,6 @@ /// import * as sigstore from '../types/sigstore'; -import { ASN1Obj } from './asn1/obj'; +import { ASN1Obj } from '../util/asn1'; import { x509AuthorityKeyIDExtension, x509BasicConstraintsExtension, x509Extension, x509KeyUsageExtension, x509SCTExtension, x509SubjectAlternativeNameExtension, x509SubjectKeyIDExtension } from './ext'; interface SCTVerificationResult { verified: boolean; diff --git a/node_modules/npm/node_modules/sigstore/dist/x509/cert.js b/node_modules/npm/node_modules/sigstore/dist/x509/cert.js index 0b8ab54..ec14b5f 100644 --- a/node_modules/npm/node_modules/sigstore/dist/x509/cert.js +++ b/node_modules/npm/node_modules/sigstore/dist/x509/cert.js @@ -2,8 +2,8 @@ Object.defineProperty(exports, "__esModule", { value: true }); exports.x509Certificate = void 0; const util_1 = require("../util"); +const asn1_1 = require("../util/asn1"); const stream_1 = require("../util/stream"); -const obj_1 = require("./asn1/obj"); const ext_1 = require("./ext"); const EXTENSION_OID_SUBJECT_KEY_ID = '2.5.29.14'; const EXTENSION_OID_KEY_USAGE = '2.5.29.15'; @@ -33,7 +33,7 @@ class x509Certificate { } static parse(cert) { const der = typeof cert === 'string' ? 
util_1.pem.toDER(cert) : cert; - const asn1 = obj_1.ASN1Obj.parseBuffer(der); + const asn1 = asn1_1.ASN1Obj.parseBuffer(der); return new x509Certificate(asn1); } get tbsCertificate() { diff --git a/node_modules/npm/node_modules/sigstore/dist/x509/ext.d.ts b/node_modules/npm/node_modules/sigstore/dist/x509/ext.d.ts index afee31a..d6285f3 100644 --- a/node_modules/npm/node_modules/sigstore/dist/x509/ext.d.ts +++ b/node_modules/npm/node_modules/sigstore/dist/x509/ext.d.ts @@ -1,5 +1,5 @@ /// -import { ASN1Obj } from './asn1/obj'; +import { ASN1Obj } from '../util/asn1'; import { SignedCertificateTimestamp } from './sct'; export declare class x509Extension { protected root: ASN1Obj; diff --git a/node_modules/npm/node_modules/sigstore/dist/x509/ext.js b/node_modules/npm/node_modules/sigstore/dist/x509/ext.js index c1743dc..246aeb0 100644 --- a/node_modules/npm/node_modules/sigstore/dist/x509/ext.js +++ b/node_modules/npm/node_modules/sigstore/dist/x509/ext.js @@ -1,21 +1,6 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.x509SCTExtension = exports.x509SubjectKeyIDExtension = exports.x509AuthorityKeyIDExtension = exports.x509SubjectAlternativeNameExtension = exports.x509KeyUsageExtension = exports.x509BasicConstraintsExtension = exports.x509Extension = void 0; -/* -Copyright 2023 The Sigstore Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
-*/
 const stream_1 = require("../util/stream");
 const sct_1 = require("./sct");
 // https://www.rfc-editor.org/rfc/rfc5280#section-4.1
diff --git a/node_modules/npm/node_modules/sigstore/package.json b/node_modules/npm/node_modules/sigstore/package.json
index 2ca34e2..02655a6 100644
--- a/node_modules/npm/node_modules/sigstore/package.json
+++ b/node_modules/npm/node_modules/sigstore/package.json
@@ -1,10 +1,11 @@
 {
   "name": "sigstore",
-  "version": "1.5.2",
+  "version": "1.7.0",
   "description": "code-signing for npm packages",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
   "scripts": {
+    "clean": "shx rm -rf dist *.tsbuildinfo",
     "build": "tsc --build",
     "test": "jest"
   },
@@ -29,18 +30,15 @@
     "provenance": true
   },
   "devDependencies": {
-    "@total-typescript/shoehorn": "^0.1.0",
+    "@sigstore/rekor-types": "^1.0.0",
+    "@sigstore/jest": "^0.0.0",
     "@tufjs/repo-mock": "^1.1.0",
-    "@types/make-fetch-happen": "^10.0.0",
-    "@types/node": "^20.0.0",
-    "json-schema-to-typescript": "^13.0.0",
-    "nock": "^13.2.4",
-    "typescript": "^5.0.2"
+    "@types/make-fetch-happen": "^10.0.0"
   },
   "dependencies": {
     "@sigstore/protobuf-specs": "^0.1.0",
-    "make-fetch-happen": "^11.0.1",
-    "tuf-js": "^1.1.3"
+    "@sigstore/tuf": "^1.0.1",
+    "make-fetch-happen": "^11.0.1"
   },
   "engines": {
     "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
diff --git a/node_modules/npm/node_modules/string_decoder/package.json b/node_modules/npm/node_modules/string_decoder/package.json
index 518c3eb..b2bb141 100644
--- a/node_modules/npm/node_modules/string_decoder/package.json
+++ b/node_modules/npm/node_modules/string_decoder/package.json
@@ -1,10 +1,13 @@
 {
   "name": "string_decoder",
-  "version": "1.1.1",
+  "version": "1.3.0",
   "description": "The string_decoder module from Node core",
   "main": "lib/string_decoder.js",
+  "files": [
+    "lib"
+  ],
   "dependencies": {
-    "safe-buffer": "~5.1.0"
+    "safe-buffer": "~5.2.0"
   },
   "devDependencies": {
     "babel-polyfill": "^6.23.0",
diff --git a/node_modules/npm/node_modules/supports-color/browser.js b/node_modules/npm/node_modules/supports-color/browser.js
new file mode 100644
index 0000000..1ffde64
--- /dev/null
+++ b/node_modules/npm/node_modules/supports-color/browser.js
@@ -0,0 +1,30 @@
+/* eslint-env browser */
+
+const level = (() => {
+  if (navigator.userAgentData) {
+    const brand = navigator.userAgentData.brands.find(({brand}) => brand === 'Chromium');
+    if (brand?.version > 93) {
+      return 3;
+    }
+  }
+
+  if (/\b(Chrome|Chromium)\//.test(navigator.userAgent)) {
+    return 1;
+  }
+
+  return 0;
+})();
+
+const colorSupport = level !== 0 && {
+  level,
+  hasBasic: true,
+  has256: level >= 2,
+  has16m: level >= 3,
+};
+
+const supportsColor = {
+  stdout: colorSupport,
+  stderr: colorSupport,
+};
+
+export default supportsColor;
diff --git a/node_modules/npm/node_modules/supports-color/index.js b/node_modules/npm/node_modules/supports-color/index.js
new file mode 100644
index 0000000..ca95e9f
--- /dev/null
+++ b/node_modules/npm/node_modules/supports-color/index.js
@@ -0,0 +1,182 @@
+import process from 'node:process';
+import os from 'node:os';
+import tty from 'node:tty';
+
+// From: https://github.com/sindresorhus/has-flag/blob/main/index.js
+/// function hasFlag(flag, argv = globalThis.Deno?.args ?? process.argv) {
+function hasFlag(flag, argv = globalThis.Deno ? globalThis.Deno.args : process.argv) {
+  const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--');
+  const position = argv.indexOf(prefix + flag);
+  const terminatorPosition = argv.indexOf('--');
+  return position !== -1 && (terminatorPosition === -1 || position < terminatorPosition);
+}
+
+const {env} = process;
+
+let flagForceColor;
+if (
+  hasFlag('no-color')
+  || hasFlag('no-colors')
+  || hasFlag('color=false')
+  || hasFlag('color=never')
+) {
+  flagForceColor = 0;
+} else if (
+  hasFlag('color')
+  || hasFlag('colors')
+  || hasFlag('color=true')
+  || hasFlag('color=always')
+) {
+  flagForceColor = 1;
+}
+
+function envForceColor() {
+  if ('FORCE_COLOR' in env) {
+    if (env.FORCE_COLOR === 'true') {
+      return 1;
+    }
+
+    if (env.FORCE_COLOR === 'false') {
+      return 0;
+    }
+
+    return env.FORCE_COLOR.length === 0 ? 1 : Math.min(Number.parseInt(env.FORCE_COLOR, 10), 3);
+  }
+}
+
+function translateLevel(level) {
+  if (level === 0) {
+    return false;
+  }
+
+  return {
+    level,
+    hasBasic: true,
+    has256: level >= 2,
+    has16m: level >= 3,
+  };
+}
+
+function _supportsColor(haveStream, {streamIsTTY, sniffFlags = true} = {}) {
+  const noFlagForceColor = envForceColor();
+  if (noFlagForceColor !== undefined) {
+    flagForceColor = noFlagForceColor;
+  }
+
+  const forceColor = sniffFlags ? flagForceColor : noFlagForceColor;
+
+  if (forceColor === 0) {
+    return 0;
+  }
+
+  if (sniffFlags) {
+    if (hasFlag('color=16m')
+      || hasFlag('color=full')
+      || hasFlag('color=truecolor')) {
+      return 3;
+    }
+
+    if (hasFlag('color=256')) {
+      return 2;
+    }
+  }
+
+  // Check for Azure DevOps pipelines.
+  // Has to be above the `!streamIsTTY` check.
+  if ('TF_BUILD' in env && 'AGENT_NAME' in env) {
+    return 1;
+  }
+
+  if (haveStream && !streamIsTTY && forceColor === undefined) {
+    return 0;
+  }
+
+  const min = forceColor || 0;
+
+  if (env.TERM === 'dumb') {
+    return min;
+  }
+
+  if (process.platform === 'win32') {
+    // Windows 10 build 10586 is the first Windows release that supports 256 colors.
+    // Windows 10 build 14931 is the first release that supports 16m/TrueColor.
+    const osRelease = os.release().split('.');
+    if (
+      Number(osRelease[0]) >= 10
+      && Number(osRelease[2]) >= 10_586
+    ) {
+      return Number(osRelease[2]) >= 14_931 ? 3 : 2;
+    }
+
+    return 1;
+  }
+
+  if ('CI' in env) {
+    if ('GITHUB_ACTIONS' in env) {
+      return 3;
+    }
+
+    if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI', 'BUILDKITE', 'DRONE'].some(sign => sign in env) || env.CI_NAME === 'codeship') {
+      return 1;
+    }
+
+    return min;
+  }
+
+  if ('TEAMCITY_VERSION' in env) {
+    return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0;
+  }
+
+  if (env.COLORTERM === 'truecolor') {
+    return 3;
+  }
+
+  if (env.TERM === 'xterm-kitty') {
+    return 3;
+  }
+
+  if ('TERM_PROGRAM' in env) {
+    const version = Number.parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10);
+
+    switch (env.TERM_PROGRAM) {
+      case 'iTerm.app': {
+        return version >= 3 ? 3 : 2;
+      }
+
+      case 'Apple_Terminal': {
+        return 2;
+      }
+      // No default
+    }
+  }
+
+  if (/-256(color)?$/i.test(env.TERM)) {
+    return 2;
+  }
+
+  if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) {
+    return 1;
+  }
+
+  if ('COLORTERM' in env) {
+    return 1;
+  }
+
+  return min;
+}
+
+export function createSupportsColor(stream, options = {}) {
+  const level = _supportsColor(stream, {
+    streamIsTTY: stream && stream.isTTY,
+    ...options,
+  });
+
+  return translateLevel(level);
+}
+
+const supportsColor = {
+  stdout: createSupportsColor({isTTY: tty.isatty(1)}),
+  stderr: createSupportsColor({isTTY: tty.isatty(2)}),
+};
+
+export default supportsColor;
diff --git a/node_modules/npm/node_modules/supports-color/license b/node_modules/npm/node_modules/supports-color/license
new file mode 100644
index 0000000..fa7ceba
--- /dev/null
+++ b/node_modules/npm/node_modules/supports-color/license
@@ -0,0 +1,9 @@
+MIT License
+
+Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (https://sindresorhus.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
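The vendored supports-color module above resolves a per-stream color level (0 = none, 1 = basic, 2 = 256-color, 3 = truecolor) and exposes it as `{level, hasBasic, has256, has16m}`. A minimal usage sketch against the API shown in index.js (plain Node, ESM; the result depends on your terminal and environment):

    // Usage sketch for the supports-color API added above.
    import supportsColor, {createSupportsColor} from 'supports-color';

    // Default detectors are created for stdout (fd 1) and stderr (fd 2).
    if (supportsColor.stdout) {
      console.log(`stdout color level: ${supportsColor.stdout.level}`); // 1, 2, or 3
    }
    if (supportsColor.stdout && supportsColor.stdout.has16m) {
      console.log('stdout supports 16 million colors (truecolor)');
    }

    // A detector can also be built for an arbitrary stream; sniffFlags: false
    // skips --color/--no-color argv sniffing and uses only env/TTY signals.
    const custom = createSupportsColor(process.stderr, {sniffFlags: false});
    console.log(custom); // false, or {level, hasBasic, has256, has16m}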
diff --git a/node_modules/npm/node_modules/supports-color/package.json b/node_modules/npm/node_modules/supports-color/package.json
new file mode 100644
index 0000000..eb6011c
--- /dev/null
+++ b/node_modules/npm/node_modules/supports-color/package.json
@@ -0,0 +1,61 @@
+{
+  "name": "supports-color",
+  "version": "9.3.1",
+  "description": "Detect whether a terminal supports color",
+  "license": "MIT",
+  "repository": "chalk/supports-color",
+  "funding": "https://github.com/chalk/supports-color?sponsor=1",
+  "author": {
+    "name": "Sindre Sorhus",
+    "email": "sindresorhus@gmail.com",
+    "url": "https://sindresorhus.com"
+  },
+  "type": "module",
+  "exports": {
+    "node": "./index.js",
+    "default": "./browser.js"
+  },
+  "engines": {
+    "node": ">=12"
+  },
+  "scripts": {
+    "//test": "xo && ava && tsd",
+    "test": "xo && tsd"
+  },
+  "files": [
+    "index.js",
+    "index.d.ts",
+    "browser.js",
+    "browser.d.ts"
+  ],
+  "keywords": [
+    "color",
+    "colour",
+    "colors",
+    "terminal",
+    "console",
+    "cli",
+    "ansi",
+    "styles",
+    "tty",
+    "rgb",
+    "256",
+    "shell",
+    "xterm",
+    "command-line",
+    "support",
+    "supports",
+    "capability",
+    "detect",
+    "truecolor",
+    "16m"
+  ],
+  "devDependencies": {
+    "@types/node": "^16.11.7",
+    "ava": "^3.15.0",
+    "import-fresh": "^3.3.0",
+    "tsd": "^0.18.0",
+    "typescript": "^4.4.3",
+    "xo": "^0.49.0"
+  }
+}
diff --git a/node_modules/npm/node_modules/tar/lib/normalize-unicode.js b/node_modules/npm/node_modules/tar/lib/normalize-unicode.js
index 43dc406..79e285a 100644
--- a/node_modules/npm/node_modules/tar/lib/normalize-unicode.js
+++ b/node_modules/npm/node_modules/tar/lib/normalize-unicode.js
@@ -6,7 +6,7 @@ const normalizeCache = Object.create(null)
 const { hasOwnProperty } = Object.prototype
 module.exports = s => {
   if (!hasOwnProperty.call(normalizeCache, s)) {
-    normalizeCache[s] = s.normalize('NFKD')
+    normalizeCache[s] = s.normalize('NFD')
   }
   return normalizeCache[s]
 }
diff --git a/node_modules/npm/node_modules/tar/lib/path-reservations.js b/node_modules/npm/node_modules/tar/lib/path-reservations.js
index ef380ca..8d349d5 100644
--- a/node_modules/npm/node_modules/tar/lib/path-reservations.js
+++ b/node_modules/npm/node_modules/tar/lib/path-reservations.js
@@ -123,7 +123,7 @@ module.exports = () => {
     // effectively removing all parallelization on windows.
     paths = isWindows ? ['win32 parallelization disabled'] : paths.map(p => {
       // don't need normPath, because we skip this entirely for windows
-      return normalize(stripSlashes(join(p))).toLowerCase()
+      return stripSlashes(join(normalize(p))).toLowerCase()
    })

    const dirs = new Set(
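The two tar hunks above, and the unpack.js hunk just below, all concern Unicode normalization of path cache keys: NFD replaces NFKD, and normalization now happens on the raw path before it is joined and slash-stripped. A small sketch of what NFD buys for path-collision detection (plain Node; the strings are illustrative, not taken from tar's tests):

    // Sketch: why NFD-normalized, lowercased strings make good path cache keys.
    const composed = 'caf\u00E9';    // "café" as one precomposed code point
    const decomposed = 'cafe\u0301'; // "café" as "e" + combining acute accent

    console.log(composed === decomposed);                                   // false
    console.log(composed.normalize('NFD') === decomposed.normalize('NFD')); // true

    // NFKD goes further and also folds compatibility characters, e.g. the
    // "ﬁ" ligature into the letters "fi" -- broader than collision detection needs:
    console.log('\uFB01'.normalize('NFKD')); // "fi"
    console.log('\uFB01'.normalize('NFD'));  // "ﬁ" (unchanged)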
diff --git a/node_modules/npm/node_modules/tar/lib/unpack.js b/node_modules/npm/node_modules/tar/lib/unpack.js
index e341ad0..fa46611 100644
--- a/node_modules/npm/node_modules/tar/lib/unpack.js
+++ b/node_modules/npm/node_modules/tar/lib/unpack.js
@@ -105,7 +105,7 @@ const uint32 = (a, b, c) =>
 // Note that on windows, we always drop the entire cache whenever a
 // symbolic link is encountered, because 8.3 filenames are impossible
 // to reason about, and collisions are hazards rather than just failures.
-const cacheKeyNormalize = path => normalize(stripSlash(normPath(path)))
+const cacheKeyNormalize = path => stripSlash(normPath(normalize(path)))
   .toLowerCase()

 const pruneCache = (cache, abs) => {
diff --git a/node_modules/npm/node_modules/tar/package.json b/node_modules/npm/node_modules/tar/package.json
index 943c8b4..f59f54a 100644
--- a/node_modules/npm/node_modules/tar/package.json
+++ b/node_modules/npm/node_modules/tar/package.json
@@ -2,7 +2,7 @@
   "author": "GitHub Inc.",
   "name": "tar",
   "description": "tar for node",
-  "version": "6.1.14",
+  "version": "6.1.15",
   "repository": {
     "type": "git",
     "url": "https://github.com/isaacs/node-tar.git"
diff --git a/node_modules/npm/node_modules/tuf-js/dist/updater.js b/node_modules/npm/node_modules/tuf-js/dist/updater.js
index 71fa498..2aba48d 100644
--- a/node_modules/npm/node_modules/tuf-js/dist/updater.js
+++ b/node_modules/npm/node_modules/tuf-js/dist/updater.js
@@ -54,6 +54,8 @@ class Updater {
             retries: this.config.fetchRetries,
         });
    }
+    // refresh and load the metadata before downloading the target
+    // refresh should be called once after the client is initialized
    async refresh() {
        await this.loadRoot();
        await this.loadTimestamp();
@@ -102,7 +104,7 @@ class Updater {
        }
        try {
            if (fs.existsSync(filePath)) {
-                targetInfo.verify(fs.createReadStream(filePath));
+                await targetInfo.verify(fs.createReadStream(filePath));
                return filePath;
            }
        }
diff --git a/node_modules/npm/node_modules/tuf-js/package.json b/node_modules/npm/node_modules/tuf-js/package.json
index c1134af..9187d88 100644
--- a/node_modules/npm/node_modules/tuf-js/package.json
+++ b/node_modules/npm/node_modules/tuf-js/package.json
@@ -1,6 +1,6 @@
 {
   "name": "tuf-js",
-  "version": "1.1.6",
+  "version": "1.1.7",
   "description": "JavaScript implementation of The Update Framework (TUF)",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -29,16 +29,16 @@
   "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme",
   "devDependencies": {
     "@tufjs/repo-mock": "1.3.1",
-    "@types/debug": "^4.1.7",
+    "@types/debug": "^4.1.8",
     "@types/make-fetch-happen": "^10.0.1",
-    "@types/node": "^20.1.1",
+    "@types/node": "^20.2.5",
     "nock": "^13.3.1",
-    "typescript": "^5.0.4"
+    "typescript": "^5.1.3"
   },
   "dependencies": {
     "@tufjs/models": "1.0.4",
     "debug": "^4.3.4",
-    "make-fetch-happen": "^11.1.0"
+    "make-fetch-happen": "^11.1.1"
   },
   "engines": {
     "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
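The updater.js hunk above adds a previously missing `await` on `targetInfo.verify()`. Since that call returns a promise, invoking it without `await` meant a verification failure rejected outside the surrounding try/catch instead of being handled there. A hedged sketch of the failure mode; `verifyTarget` is a hypothetical stand-in, not the tuf-js API:

    // Sketch of the bug the added `await` fixes (ESM, top-level await).
    async function verifyTarget(ok) {
      if (!ok) {
        throw new Error('hash mismatch'); // rejects the returned promise
      }
    }

    async function withoutAwait() {
      try {
        verifyTarget(false); // rejected promise escapes the try/catch
        return 'trusted';    // returned even though verification failed
      } catch {
        return null;
      }
    }

    async function withAwait() {
      try {
        await verifyTarget(false); // rejection is caught below
        return 'trusted';
      } catch {
        return null; // verification failure is handled
      }
    }

    console.log(await withoutAwait()); // "trusted" (plus an unhandled rejection)
    console.log(await withAwait());    // null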
diff --git a/node_modules/npm/node_modules/wrap-ansi/node_modules/strip-ansi/index.js b/node_modules/npm/node_modules/wrap-ansi/node_modules/strip-ansi/index.js
index ef3c095..ba19750 100644
--- a/node_modules/npm/node_modules/wrap-ansi/node_modules/strip-ansi/index.js
+++ b/node_modules/npm/node_modules/wrap-ansi/node_modules/strip-ansi/index.js
@@ -1,9 +1,14 @@
 import ansiRegex from 'ansi-regex';

+const regex = ansiRegex();
+
 export default function stripAnsi(string) {
   if (typeof string !== 'string') {
     throw new TypeError(`Expected a \`string\`, got \`${typeof string}\``);
   }

-  return string.replace(ansiRegex(), '');
+  // Even though the regex is global, we don't need to reset the `.lastIndex`
+  // because unlike `.exec()` and `.test()`, `.replace()` does it automatically
+  // and doing it manually has a performance penalty.
+  return string.replace(regex, '');
 }
diff --git a/node_modules/npm/node_modules/wrap-ansi/node_modules/strip-ansi/package.json b/node_modules/npm/node_modules/wrap-ansi/node_modules/strip-ansi/package.json
index 0de0586..e1f455c 100644
--- a/node_modules/npm/node_modules/wrap-ansi/node_modules/strip-ansi/package.json
+++ b/node_modules/npm/node_modules/wrap-ansi/node_modules/strip-ansi/package.json
@@ -1,6 +1,6 @@
 {
   "name": "strip-ansi",
-  "version": "7.0.1",
+  "version": "7.1.0",
   "description": "Strip ANSI escape codes from a string",
   "license": "MIT",
   "repository": "chalk/strip-ansi",
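The strip-ansi change hoists the ansi-regex construction out of the hot path and reuses one global regex, relying on `String.prototype.replace()` resetting `lastIndex` itself (as the added comment notes). Usage is unchanged; a quick check against the public API (ESM):

    // Usage sketch for strip-ansi 7.1.0; behavior is identical to 7.0.1.
    import stripAnsi from 'strip-ansi';

    console.log(stripAnsi('\u001B[4mUnicorn\u001B[0m')); // "Unicorn"
    console.log(stripAnsi('plain text stays untouched')); // unchanged
    // Reusing the cached regex is safe because .replace() starts matching
    // from index 0 regardless of the regex's current lastIndex.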
"test": "tap", + "test:nocolor": "CI=true tap -Rclassic", "test-all": "node . run test -ws -iwr --if-present", "snap": "tap", "prepack": "node . run build -w docs", - "test:nocleanup": "NO_TEST_CLEANUP=1 node . run test --", - "sudotest": "sudo node . run run test --", - "sudotest:nocleanup": "sudo NO_TEST_CLEANUP=1 node . run test --", "posttest": "node . run lint", "lint": "eslint \"**/*.js\"", "lintfix": "node . run lint -- --fix", "lint-all": "node . run lint -ws -iwr --if-present", - "prelint": "rimraf test/npm_cache*", "resetdeps": "node scripts/resetdeps.js", "rp-pull-request": "node scripts/update-authors.js", "postlint": "template-oss-check", @@ -230,8 +228,6 @@ "test-env": [ "LC_ALL=sk" ], - "color": 1, - "files": "test/{lib,bin,index.js}", "timeout": 600, "nyc-arg": [ "--exclude", diff --git a/public/.DS_Store b/public/.DS_Store index 545b9fa..681d76a 100644 Binary files a/public/.DS_Store and b/public/.DS_Store differ diff --git a/public/css/style.css b/public/css/style.css index 94305cc..6b92189 100644 --- a/public/css/style.css +++ b/public/css/style.css @@ -1,5 +1,6 @@ +/* Font imports */ @import url('https://fonts.cdnfonts.com/css/helvetica-neue-9'); -@import url('https://fonts.cdnfonts.com/css/georgia'); +@import url('https://fonts.googleapis.com/css2?family=Inter:wght@100;200;300;400;500;600;700;800;900&family=Literata:ital,opsz,wght@0,7..72,200;0,7..72,300;0,7..72,400;0,7..72,500;0,7..72,600;1,7..72,200;1,7..72,300;1,7..72,400;1,7..72,500&display=swap'); /* This text is in Helvetica */ @@ -12,9 +13,10 @@ font-family: 'Helvetica 65 Medium', sans-serif; } -/* This text is in Georgia */ -.georgia { - font-family: 'Georgia', serif; +/* This text is in Literata */ +.literata { + font-family: 'Inter', sans-serif; + font-family: 'Literata', serif; } /* This text is in Italic for placeholders */