From 7af81c7360a6df31cdb0a8f18104b42656166378 Mon Sep 17 00:00:00 2001 From: Luke Karrys Date: Tue, 15 Aug 2023 12:09:47 -0700 Subject: [PATCH] deps: cacache@18.0.0 --- node_modules/.gitignore | 25 + .../node_modules/cacache/LICENSE.md | 16 + .../node_modules/cacache/lib/content/path.js | 29 + .../node_modules/cacache/lib/content/read.js | 166 ++ .../node_modules/cacache/lib/content/rm.js | 18 + .../node_modules/cacache/lib/content/write.js | 205 +++ .../node_modules/cacache/lib/entry-index.js | 330 ++++ .../node_modules/cacache/lib/get.js | 170 ++ .../node_modules/cacache/lib/index.js | 42 + .../node_modules/cacache/lib/memoization.js | 72 + .../node_modules/cacache/lib/put.js | 80 + .../node_modules/cacache/lib/rm.js | 31 + .../node_modules/cacache/lib/util/glob.js | 7 + .../cacache/lib/util/hash-to-segments.js | 7 + .../node_modules/cacache/lib/util/tmp.js | 26 + .../node_modules/cacache/lib/verify.js | 257 +++ .../node_modules/cacache/package.json | 82 + node_modules/cacache/lib/memoization.js | 4 +- .../cacache/node_modules/lru-cache/LICENSE | 15 + .../node_modules/lru-cache/dist/cjs/index.js | 1404 +++++++++++++++++ .../lru-cache/dist/cjs/index.min.js | 2 + .../lru-cache/dist/cjs/package.json | 3 + .../node_modules/lru-cache/dist/mjs/index.js | 1400 ++++++++++++++++ .../lru-cache/dist/mjs/index.min.js | 2 + .../lru-cache/dist/mjs/package.json | 3 + .../node_modules/lru-cache/package.json | 108 ++ node_modules/cacache/package.json | 14 +- .../node_modules/cacache/LICENSE.md | 16 + .../node_modules/cacache/lib/content/path.js | 29 + .../node_modules/cacache/lib/content/read.js | 166 ++ .../node_modules/cacache/lib/content/rm.js | 18 + .../node_modules/cacache/lib/content/write.js | 205 +++ .../node_modules/cacache/lib/entry-index.js | 330 ++++ .../node_modules/cacache/lib/get.js | 170 ++ .../node_modules/cacache/lib/index.js | 42 + .../node_modules/cacache/lib/memoization.js | 72 + .../node_modules/cacache/lib/put.js | 80 + .../node_modules/cacache/lib/rm.js | 31 + .../node_modules/cacache/lib/util/glob.js | 7 + .../cacache/lib/util/hash-to-segments.js | 7 + .../node_modules/cacache/lib/util/tmp.js | 26 + .../node_modules/cacache/lib/verify.js | 257 +++ .../node_modules/cacache/package.json | 82 + .../node-gyp/node_modules/cacache/LICENSE.md | 16 + .../node_modules/cacache/lib/content/path.js | 29 + .../node_modules/cacache/lib/content/read.js | 166 ++ .../node_modules/cacache/lib/content/rm.js | 18 + .../node_modules/cacache/lib/content/write.js | 205 +++ .../node_modules/cacache/lib/entry-index.js | 330 ++++ .../node-gyp/node_modules/cacache/lib/get.js | 170 ++ .../node_modules/cacache/lib/index.js | 42 + .../node_modules/cacache/lib/memoization.js | 72 + .../node-gyp/node_modules/cacache/lib/put.js | 80 + .../node-gyp/node_modules/cacache/lib/rm.js | 31 + .../node_modules/cacache/lib/util/glob.js | 7 + .../cacache/lib/util/hash-to-segments.js | 7 + .../node_modules/cacache/lib/util/tmp.js | 26 + .../node_modules/cacache/lib/verify.js | 257 +++ .../node_modules/brace-expansion/LICENSE | 21 + .../node_modules/brace-expansion/index.js | 203 +++ .../node_modules/brace-expansion/package.json | 46 + .../cacache/node_modules/glob/LICENSE | 15 + .../node_modules/glob/dist/cjs/package.json | 4 + .../node_modules/glob/dist/cjs/src/bin.js | 270 ++++ .../node_modules/glob/dist/cjs/src/glob.js | 238 +++ .../glob/dist/cjs/src/has-magic.js | 27 + .../node_modules/glob/dist/cjs/src/ignore.js | 103 ++ .../node_modules/glob/dist/cjs/src/index.js | 68 + .../node_modules/glob/dist/cjs/src/pattern.js | 
219 +++ .../glob/dist/cjs/src/processor.js | 309 ++++ .../node_modules/glob/dist/cjs/src/walker.js | 358 +++++ .../node_modules/glob/dist/mjs/glob.js | 234 +++ .../node_modules/glob/dist/mjs/has-magic.js | 23 + .../node_modules/glob/dist/mjs/ignore.js | 99 ++ .../node_modules/glob/dist/mjs/index.js | 56 + .../node_modules/glob/dist/mjs/package.json | 4 + .../node_modules/glob/dist/mjs/pattern.js | 215 +++ .../node_modules/glob/dist/mjs/processor.js | 302 ++++ .../node_modules/glob/dist/mjs/walker.js | 352 +++++ .../cacache/node_modules/glob/package.json | 98 ++ .../cacache/node_modules/minimatch/LICENSE | 15 + .../dist/cjs/assert-valid-pattern.js | 14 + .../node_modules/minimatch/dist/cjs/ast.js | 589 +++++++ .../minimatch/dist/cjs/brace-expressions.js | 152 ++ .../node_modules/minimatch/dist/cjs/escape.js | 22 + .../node_modules/minimatch/dist/cjs/index.js | 1011 ++++++++++++ .../minimatch/dist/cjs/package.json | 3 + .../minimatch/dist/cjs/unescape.js | 24 + .../dist/mjs/assert-valid-pattern.js | 10 + .../node_modules/minimatch/dist/mjs/ast.js | 585 +++++++ .../minimatch/dist/mjs/brace-expressions.js | 148 ++ .../node_modules/minimatch/dist/mjs/escape.js | 18 + .../node_modules/minimatch/dist/mjs/index.js | 995 ++++++++++++ .../minimatch/dist/mjs/package.json | 3 + .../minimatch/dist/mjs/unescape.js | 20 + .../node_modules/minimatch/package.json | 86 + .../cacache/node_modules/minipass/LICENSE | 15 + .../node_modules/minipass/dist/cjs/index.js | 1028 ++++++++++++ .../minipass/dist/cjs/package.json | 3 + .../node_modules/minipass/dist/mjs/index.js | 1018 ++++++++++++ .../minipass/dist/mjs/package.json | 3 + .../node_modules/minipass/package.json | 82 + .../node_modules/cacache/package.json | 82 + .../pacote/node_modules/cacache/LICENSE.md | 16 + .../node_modules/cacache/lib/content/path.js | 29 + .../node_modules/cacache/lib/content/read.js | 166 ++ .../node_modules/cacache/lib/content/rm.js | 18 + .../node_modules/cacache/lib/content/write.js | 205 +++ .../node_modules/cacache/lib/entry-index.js | 330 ++++ .../pacote/node_modules/cacache/lib/get.js | 170 ++ .../pacote/node_modules/cacache/lib/index.js | 42 + .../node_modules/cacache/lib/memoization.js | 72 + .../pacote/node_modules/cacache/lib/put.js | 80 + .../pacote/node_modules/cacache/lib/rm.js | 31 + .../node_modules/cacache/lib/util/glob.js | 7 + .../cacache/lib/util/hash-to-segments.js | 7 + .../node_modules/cacache/lib/util/tmp.js | 26 + .../pacote/node_modules/cacache/lib/verify.js | 257 +++ .../pacote/node_modules/cacache/package.json | 82 + .../sigstore/node_modules/cacache/LICENSE.md | 16 + .../node_modules/cacache/lib/content/path.js | 29 + .../node_modules/cacache/lib/content/read.js | 166 ++ .../node_modules/cacache/lib/content/rm.js | 18 + .../node_modules/cacache/lib/content/write.js | 205 +++ .../node_modules/cacache/lib/entry-index.js | 330 ++++ .../sigstore/node_modules/cacache/lib/get.js | 170 ++ .../node_modules/cacache/lib/index.js | 42 + .../node_modules/cacache/lib/memoization.js | 72 + .../sigstore/node_modules/cacache/lib/put.js | 80 + .../sigstore/node_modules/cacache/lib/rm.js | 31 + .../node_modules/cacache/lib/util/glob.js | 7 + .../cacache/lib/util/hash-to-segments.js | 7 + .../node_modules/cacache/lib/util/tmp.js | 26 + .../node_modules/cacache/lib/verify.js | 257 +++ .../cacache/node_modules/minipass/LICENSE | 15 + .../node_modules/minipass/dist/cjs/index.js | 1028 ++++++++++++ .../minipass/dist/cjs/package.json | 3 + .../node_modules/minipass/dist/mjs/index.js | 1018 ++++++++++++ 
.../minipass/dist/mjs/package.json | 3 + .../node_modules/minipass/package.json | 82 + .../node_modules/cacache/package.json | 82 + .../tuf-js/node_modules/cacache/LICENSE.md | 16 + .../node_modules/cacache/lib/content/path.js | 29 + .../node_modules/cacache/lib/content/read.js | 166 ++ .../node_modules/cacache/lib/content/rm.js | 18 + .../node_modules/cacache/lib/content/write.js | 205 +++ .../node_modules/cacache/lib/entry-index.js | 330 ++++ .../tuf-js/node_modules/cacache/lib/get.js | 170 ++ .../tuf-js/node_modules/cacache/lib/index.js | 42 + .../node_modules/cacache/lib/memoization.js | 72 + .../tuf-js/node_modules/cacache/lib/put.js | 80 + .../tuf-js/node_modules/cacache/lib/rm.js | 31 + .../node_modules/cacache/lib/util/glob.js | 7 + .../cacache/lib/util/hash-to-segments.js | 7 + .../node_modules/cacache/lib/util/tmp.js | 26 + .../tuf-js/node_modules/cacache/lib/verify.js | 257 +++ .../cacache/node_modules/minipass/LICENSE | 15 + .../node_modules/minipass/dist/cjs/index.js | 1028 ++++++++++++ .../minipass/dist/cjs/package.json | 3 + .../node_modules/minipass/dist/mjs/index.js | 1018 ++++++++++++ .../minipass/dist/mjs/package.json | 3 + .../node_modules/minipass/package.json | 82 + .../tuf-js/node_modules/cacache/package.json | 82 + package-lock.json | 233 ++- package.json | 2 +- workspaces/arborist/package.json | 2 +- 166 files changed, 25836 insertions(+), 15 deletions(-) create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js create mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json create mode 100644 node_modules/cacache/node_modules/lru-cache/LICENSE create mode 100644 node_modules/cacache/node_modules/lru-cache/dist/cjs/index.js create mode 100644 node_modules/cacache/node_modules/lru-cache/dist/cjs/index.min.js create mode 100644 node_modules/cacache/node_modules/lru-cache/dist/cjs/package.json create mode 100644 node_modules/cacache/node_modules/lru-cache/dist/mjs/index.js create mode 100644 node_modules/cacache/node_modules/lru-cache/dist/mjs/index.min.js create mode 100644 node_modules/cacache/node_modules/lru-cache/dist/mjs/package.json create mode 100644 
node_modules/cacache/node_modules/lru-cache/package.json create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/get.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/index.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/put.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/glob.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/LICENSE.md create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/content/path.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/content/read.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/content/rm.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/content/write.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/entry-index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/get.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/memoization.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/put.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/rm.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/util/glob.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js create mode 100644 node_modules/node-gyp/node_modules/cacache/lib/verify.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/package.json create mode 100755 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js create mode 
100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/assert-valid-pattern.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/ast.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/brace-expressions.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/escape.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/unescape.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/assert-valid-pattern.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/ast.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/brace-expressions.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/escape.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/unescape.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minipass/LICENSE create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/index.js create mode 100644 node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/package.json create mode 100644 
node_modules/node-gyp/node_modules/cacache/node_modules/minipass/package.json create mode 100644 node_modules/node-gyp/node_modules/cacache/package.json create mode 100644 node_modules/pacote/node_modules/cacache/LICENSE.md create mode 100644 node_modules/pacote/node_modules/cacache/lib/content/path.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/content/read.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/content/rm.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/content/write.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/entry-index.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/get.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/index.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/memoization.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/put.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/rm.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/util/glob.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/util/tmp.js create mode 100644 node_modules/pacote/node_modules/cacache/lib/verify.js create mode 100644 node_modules/pacote/node_modules/cacache/package.json create mode 100644 node_modules/sigstore/node_modules/cacache/LICENSE.md create mode 100644 node_modules/sigstore/node_modules/cacache/lib/content/path.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/content/read.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/content/rm.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/content/write.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/entry-index.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/get.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/index.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/memoization.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/put.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/rm.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/util/glob.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/util/tmp.js create mode 100644 node_modules/sigstore/node_modules/cacache/lib/verify.js create mode 100644 node_modules/sigstore/node_modules/cacache/node_modules/minipass/LICENSE create mode 100644 node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/index.js create mode 100644 node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/package.json create mode 100644 node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/index.js create mode 100644 node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/package.json create mode 100644 node_modules/sigstore/node_modules/cacache/node_modules/minipass/package.json create mode 100644 node_modules/sigstore/node_modules/cacache/package.json create mode 100644 node_modules/tuf-js/node_modules/cacache/LICENSE.md create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/content/path.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/content/read.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/content/rm.js create mode 
100644 node_modules/tuf-js/node_modules/cacache/lib/content/write.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/entry-index.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/get.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/index.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/memoization.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/put.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/rm.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/util/glob.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js create mode 100644 node_modules/tuf-js/node_modules/cacache/lib/verify.js create mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/LICENSE create mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/index.js create mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/package.json create mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/index.js create mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/package.json create mode 100644 node_modules/tuf-js/node_modules/cacache/node_modules/minipass/package.json create mode 100644 node_modules/tuf-js/node_modules/cacache/package.json diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 55611b7a54162..6b6963e6939f7 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -28,6 +28,9 @@ !/@npmcli/installed-package-contents !/@npmcli/map-workspaces !/@npmcli/metavuln-calculator +!/@npmcli/metavuln-calculator/node_modules/ +/@npmcli/metavuln-calculator/node_modules/* +!/@npmcli/metavuln-calculator/node_modules/cacache !/@npmcli/name-from-folder !/@npmcli/node-gyp !/@npmcli/package-json @@ -71,6 +74,9 @@ !/buffer !/builtins !/cacache +!/cacache/node_modules/ +/cacache/node_modules/* +!/cacache/node_modules/lru-cache !/chalk !/chownr !/ci-info @@ -146,6 +152,9 @@ !/just-diff !/lru-cache !/make-fetch-happen +!/make-fetch-happen/node_modules/ +/make-fetch-happen/node_modules/* +!/make-fetch-happen/node_modules/cacache !/minimatch !/minipass-collect !/minipass-collect/node_modules/ @@ -183,6 +192,13 @@ !/node-gyp/node_modules/abbrev !/node-gyp/node_modules/are-we-there-yet !/node-gyp/node_modules/brace-expansion +!/node-gyp/node_modules/cacache +!/node-gyp/node_modules/cacache/node_modules/ +/node-gyp/node_modules/cacache/node_modules/* +!/node-gyp/node_modules/cacache/node_modules/brace-expansion +!/node-gyp/node_modules/cacache/node_modules/glob +!/node-gyp/node_modules/cacache/node_modules/minimatch +!/node-gyp/node_modules/cacache/node_modules/minipass !/node-gyp/node_modules/gauge !/node-gyp/node_modules/glob !/node-gyp/node_modules/make-fetch-happen @@ -214,6 +230,7 @@ !/pacote/node_modules/@npmcli/ /pacote/node_modules/@npmcli/* !/pacote/node_modules/@npmcli/git +!/pacote/node_modules/cacache !/parse-conflict-json !/path-is-absolute !/path-key @@ -255,6 +272,10 @@ !/sigstore !/sigstore/node_modules/ /sigstore/node_modules/* +!/sigstore/node_modules/cacache +!/sigstore/node_modules/cacache/node_modules/ +/sigstore/node_modules/cacache/node_modules/* +!/sigstore/node_modules/cacache/node_modules/minipass !/sigstore/node_modules/make-fetch-happen !/sigstore/node_modules/minipass !/smart-buffer @@ -285,6 +306,10 @@ !/tuf-js 
!/tuf-js/node_modules/ /tuf-js/node_modules/* +!/tuf-js/node_modules/cacache +!/tuf-js/node_modules/cacache/node_modules/ +/tuf-js/node_modules/cacache/node_modules/* +!/tuf-js/node_modules/cacache/node_modules/minipass !/tuf-js/node_modules/make-fetch-happen !/tuf-js/node_modules/minipass !/unique-filename diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md new file mode 100644 index 0000000000000..8d28acf866d93 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js new file mode 100644 index 0000000000000..ad5a76a4f73f2 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js new file mode 100644 index 0000000000000..f41b539df65dc --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js @@ -0,0 +1,166 @@ +'use strict' + +const fs = require('fs/promises') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +async function read (cache, integrity, opts = {}) { + const { size } = opts + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = await fs.stat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && stat.size !== size) { + throw sizeError(size, stat.size) + } + + if (stat.size > MAX_SINGLE_READ_SIZE) { + return 
readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } + + const data = await fs.readFile(cpath, { encoding: null }) + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } + + return data +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // just stat to ensure it exists + const stat = await fs.stat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && size !== stat.size) { + return stream.emit('error', sizeError(size, stat.size)) + } + + return readPipeline(cpath, stat.size, sri, stream) + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.copy = copy + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath, sri) => { + return fs.copyFile(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + +async function hasContent (cache, integrity) { + if (!integrity) { + return false + } + + try { + return await withContentSri(cache, integrity, async (cpath, sri) => { + const stat = await fs.stat(cpath) + return { size: stat.size, sri, stat } + }) + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } +} + +async function withContentSri (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. 
+ const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + const results = await Promise.all(digests.map(async (meta) => { + try { + return await withContentSri(cache, meta, fn) + } catch (err) { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + } + })) + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result + } + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError + } + + // Throw generic error + throw results.find((r) => r instanceof Error) + } +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js new file mode 100644 index 0000000000000..ce58d679e4cb2 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,18 @@ +'use strict' + +const fs = require('fs/promises') +const contentPath = require('./path') +const { hasContent } = require('./read') + +module.exports = rm + +async function rm (cache, integrity) { + const content = await hasContent(cache, integrity) + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) + return true + } else { + return false + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js new file mode 100644 index 0000000000000..7146146581287 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js @@ -0,0 +1,205 @@ +'use strict' + +const events = require('events') + +const contentPath = require('./path') +const fs = require('fs/promises') +const { moveFile } = require('@npmcli/fs') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') +const Flush = require('minipass-flush') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const fsm = require('fs-minipass') + +module.exports = write + +// Cache of move operations in process so we don't duplicate +const moveOperations = new Map() + +async function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + + if (typeof size === 'number' && data.length !== size) { + throw sizeError(size, data.length) + } + + const sri = ssri.fromData(data, algorithms ? 
{ algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) { + throw checksumError(integrity, sri) + } + + for (const algo in sri) { + const tmp = await makeTmp(cache, opts) + const hash = sri[algo].toString() + try { + await fs.writeFile(tmp.target, data, { flag: 'wx' }) + await moveToDestination(tmp, cache, hash, opts) + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } + } + return { integrity: sri, size: data.length } +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. +class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. + return Promise.reject(e).catch(cb) + } + // eslint-disable-next-line promise/catch-or-return + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + // eslint-disable-next-line promise/always-return + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +async function handleContent (inputStream, cache, opts) { + const tmp = await makeTmp(cache, opts) + try { + const res = await pipeToTmp(inputStream, cache, tmp.target, opts) + await moveToDestination( + tmp, + cache, + res.integrity, + opts + ) + return res + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } +} + +async function pipeToTmp (inputStream, cache, tmpTarget, opts) { + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + if (opts.integrityEmitter) { + // we need to create these all simultaneously since they can fire in any order + const [integrity, size] = await Promise.all([ + events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), + events.once(opts.integrityEmitter, 'size').then(res => res[0]), + new Pipeline(inputStream, outStream).promise(), + ]) + return { integrity, size } + } + + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const pipeline = new Pipeline(inputStream, hashStream, outStream) + await pipeline.promise() + return { integrity, size } +} + +async function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) + return { + target: tmpTarget, + moved: false, + } +} + +async function moveToDestination (tmp, cache, sri, opts) { + const destination = contentPath(cache, sri) + const destDir = path.dirname(destination) 
+ if (moveOperations.has(destination)) { + return moveOperations.get(destination) + } + moveOperations.set( + destination, + fs.mkdir(destDir, { recursive: true }) + .then(async () => { + await moveFile(tmp.target, destination, { overwrite: false }) + tmp.moved = true + return tmp.moved + }) + .catch(err => { + if (!err.message.startsWith('The destination file exists')) { + throw Object.assign(err, { code: 'EEXIST' }) + } + }).finally(() => { + moveOperations.delete(destination) + }) + + ) + return moveOperations.get(destination) +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js new file mode 100644 index 0000000000000..722a37af5ce15 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,330 @@ +'use strict' + +const crypto = require('crypto') +const { + appendFile, + mkdir, + readFile, + readdir, + rm, + writeFile, +} = require('fs/promises') +const { Minipass } = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const contentPath = require('./content/path') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const { moveFile } = require('@npmcli/fs') + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. + // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. 
+ if (entry.integrity === null && !opts.validateEntry) { + break + } + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { + newEntries.unshift(entry) + } + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await mkdir(path.dirname(target), { recursive: true }) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) { + return rm(tmp.target, { recursive: true, force: true }) + } + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await mkdir(path.dirname(bucket), { recursive: true }) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + } + + // write the file atomically + const tmp = await setup() + try { + await write(tmp) + } finally { + await teardown(tmp) + } + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +async function insert (cache, key, integrity, opts = {}) { + const { metadata, size, time } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: time || Date.now(), + size, + metadata, + } + try { + await mkdir(path.dirname(bucket), { recursive: true }) + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. 
+ await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + + throw err + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +async function find (cache, key) { + const bucket = bucketPath(cache, key) + try { + const entries = await bucketEntries(bucket) + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) { + return insert(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rm(bucket, { recursive: true, force: true }) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const buckets = await readdirOrEmpty(indexDir) + await Promise.all(buckets.map(async (bucket) => { + const bucketPath = path.join(indexDir, bucket) + const subbuckets = await readdirOrEmpty(bucketPath) + await Promise.all(subbuckets.map(async (subbucket) => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename//./*" + const subbucketEntries = await readdirOrEmpty(subbucketPath) + await Promise.all(subbucketEntries.map(async (entry) => { + const entryPath = path.join(subbucketPath, entry) + try { + const entries = await bucketEntries(entryPath) + // using a Map here prevents duplicate keys from showing up + // twice, I guess? + const reduced = entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + throw err + } + })) + })) + })) + stream.end() + return stream + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.ls = ls + +async function ls (cache) { + const entries = await lsStream(cache).collect() + return entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) +} + +module.exports.bucketEntries = bucketEntries + +async function bucketEntries (bucket, filter) { + const data = await readFile(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data, filter) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) { + return + } + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter! 
+ // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (_) { + // eslint-ignore-next-line no-empty-block + } + // coverage disabled here, no need to test with an entry that parses to something falsey + // istanbul ignore else + if (obj) { + entries.push(obj) + } + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) { + return null + } + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return [] + } + + throw err + }) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js new file mode 100644 index 0000000000000..80ec206c7ecaa --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js @@ -0,0 +1,170 @@ +'use strict' + +const Collect = require('minipass-collect') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') + +const index = require('./entry-index') +const memo = require('./memoization') +const read = require('./content/read') + +async function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = await read(cache, entry.integrity, { integrity, size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} +module.exports = getData + +async function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) { + return memoized + } + + const res = await read(cache, key, { integrity, size }) + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res +} +module.exports.byDigest = getDataByDigest + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 'integrity' && cb(memoized.entry.integrity) + ev === 'size' && cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function 
getStream (cache, key, opts = {}) { + const { memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return getMemoizedStream(memoized) + } + + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const entry = await index.find(cache, key) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + return stream + }).catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) { + return stream + } + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized.entry) + } else { + return index.find(cache, key) + } +} +module.exports.info = info + +async function copy (cache, key, dest, opts = {}) { + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + await read.copy(cache, entry.integrity, dest, opts) + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} + +module.exports.copy = copy + +async function copyByDigest (cache, key, dest, opts = {}) { + await read.copy(cache, key, dest, opts) + return key +} + +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js new file mode 100644 index 0000000000000..c9b0da5f3a271 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js @@ -0,0 +1,42 @@ +'use strict' + +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = require('./memoization.js') +const tmp = require('./util/tmp.js') +const index = require('./entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = index.ls +module.exports.ls.stream = index.lsStream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.stream = 
get.stream +module.exports.get.stream.byDigest = get.stream.byDigest +module.exports.get.copy = get.copy +module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = get.info +module.exports.get.hasContent = get.hasContent + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js new file mode 100644 index 0000000000000..0ff604a479c9c --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js @@ -0,0 +1,72 @@ +'use strict' + +const LRU = require('lru-cache') + +const MEMOIZED = new LRU({ + max: 500, + maxSize: 50 * 1024 * 1024, // 50MB + ttl: 3 * 60 * 1000, // 3 minutes + sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.clear() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) { + return MEMOIZED + } else if (opts.memoize.get && opts.memoize.set) { + return opts.memoize + } else if (typeof opts.memoize === 'object') { + return new ObjProxy(opts.memoize) + } else { + return MEMOIZED + } +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js new file mode 100644 index 0000000000000..9fc932d5f6dec --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js @@ -0,0 +1,80 @@ +'use strict' + +const index = require('./entry-index') +const memo = require('./memoization') +const write = require('./content/write') +const Flush = require('minipass-flush') +const { PassThrough } = require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) + +module.exports = putData + +async function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + const res = await write(cache, data, opts) + const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + 
return res.integrity +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + let integrity + let size + let error + + let memoData + const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. + if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. + const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + .on('error', (err) => { + error = err + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + async flush () { + if (!error) { + const entry = await index.insert(cache, key, integrity, { ...opts, size }) + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + pipeline.emit('integrity', integrity) + pipeline.emit('size', size) + } + }, + })) + + return pipeline +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js new file mode 100644 index 0000000000000..a94760c7cf243 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const { rm } = require('fs/promises') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const memo = require('./memoization') +const path = require('path') +const rmContent = require('./content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +async function all (cache) { + memo.clearMemoized() + const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) + return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js new file mode 100644 index 0000000000000..8500c1c16a429 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js @@ -0,0 +1,7 @@ +'use strict' + +const { glob } = require('glob') +const path = require('path') + +const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) +module.exports = (path, options) => glob(globify(path), options) diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 0000000000000..445599b503808 --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git 
a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 0000000000000..0bf5302136ebe --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js @@ -0,0 +1,26 @@ +'use strict' + +const { withTempDir } = require('@npmcli/fs') +const fs = require('fs/promises') +const path = require('path') + +module.exports.mkdir = mktmpdir + +async function mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return withTempDir(path.join(cache, 'tmp'), cb, opts) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js new file mode 100644 index 0000000000000..62e85c946490f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js @@ -0,0 +1,257 @@ +'use strict' + +const { + mkdir, + readFile, + rm, + stat, + truncate, + writeFile, +} = require('fs/promises') +const pMap = require('p-map') +const contentPath = require('./content/path') +const fsm = require('fs-minipass') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const path = require('path') +const ssri = require('ssri') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +async function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + const stats = {} + for (const step of steps) { + const label = step.name + const start = new Date() + const s = await step(cache, opts) + if (s) { + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + } + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + stats.runTime[label] = end - start + } + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats +} + +async function markStartTime (cache, opts) { + return { startTime: new Date() } +} + +async function markEndTime (cache, opts) { + return { endTime: new Date() } +} + +async function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache permissions') + await mkdir(cache, { recursive: true }) + return null +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rm it. 
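+// (A single piece of content may be referenced by many index entries, so
+// liveness is tracked as a Set of stringified integrity values and each
+// content file is checked against it once.)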
+// +async function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) { + return + } + + // integrity is stringified, re-parse it so we can get each hash + const integrity = ssri.parse(entry.integrity) + for (const algo in integrity) { + liveContent.add(integrity[algo].toString()) + } + }) + await new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }) + const contentDir = contentPath.contentDir(cache) + const files = await glob(path.join(contentDir, '**'), { + follow: false, + nodir: true, + nosort: true, + }) + const stats = { + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + } + await pMap( + files, + async (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + const info = await verifyContent(f, integrity) + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + } else { + // No entries refer to this content. We can delete. + stats.reclaimedCount++ + const s = await stat(f) + await rm(f, { recursive: true, force: true }) + stats.reclaimedSize += s.size + } + return stats + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function verifyContent (filepath, sri) { + const contentInfo = {} + try { + const { size } = await stat(filepath) + contentInfo.size = size + contentInfo.valid = true + await ssri.checkStream(new fsm.ReadStream(filepath), sri) + } catch (err) { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + if (err.code !== 'EINTEGRITY') { + throw err + } + + await rm(filepath, { recursive: true, force: true }) + contentInfo.valid = false + } + return contentInfo +} + +async function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + const entries = await index.ls(cache) + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + await pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function rebuildBucket (cache, bucket, stats, opts) { + await truncate(bucket._path) + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. 
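+  // each entry is re-inserted only if its content file still exists on disk;
+  // entries whose content is missing are counted as rejected instead.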
+ for (const entry of bucket) { + const content = contentPath(cache, entry.integrity) + try { + await stat(content) + await index.insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + time: entry.time, + }) + stats.totalEntries++ + } catch (err) { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + } else { + throw err + } + } + } +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) +} + +async function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + return writeFile(verifile, `${Date.now()}`) +} + +module.exports.lastRun = lastRun + +async function lastRun (cache) { + const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) + return new Date(+data) +} diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json new file mode 100644 index 0000000000000..ab58cb8b7c50f --- /dev/null +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json @@ -0,0 +1,82 @@ +{ + "name": "cacache", + "version": "17.1.4", + "cache-version": { + "content": "2", + "index": "5" + }, + "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "coverage": "tap", + "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", + "lint": "eslint \"**/*.js\"", + "npmclilint": "npmcli-lint", + "lintfix": "npm run lint -- --fix", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/cacache.git" + }, + "keywords": [ + "cache", + "caching", + "content-addressable", + "sri", + "sri hash", + "subresource integrity", + "cache", + "storage", + "store", + "file store", + "filesystem", + "disk cache", + "disk storage" + ], + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^7.7.1", + "minipass": "^7.0.3", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.18.0", + "tap": "^16.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "windowsCI": false, + "version": "4.18.0", + "publish": "true" + }, + "author": "GitHub Inc.", + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/node_modules/cacache/lib/memoization.js b/node_modules/cacache/lib/memoization.js index 0ff604a479c9c..2ecc60912e456 100644 --- a/node_modules/cacache/lib/memoization.js +++ b/node_modules/cacache/lib/memoization.js @@ -1,8 +1,8 @@ 'use strict' -const LRU = require('lru-cache') +const { LRUCache } = require('lru-cache') -const MEMOIZED = new LRU({ +const MEMOIZED = new LRUCache({ max: 500, maxSize: 50 * 1024 * 1024, // 50MB ttl: 3 * 60 * 1000, // 3 minutes diff --git a/node_modules/cacache/node_modules/lru-cache/LICENSE b/node_modules/cacache/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000000000..f785757cd63f8 --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.js b/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.js new file mode 100644 index 0000000000000..02d76ec800a92 --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.js @@ -0,0 +1,1404 @@ +"use strict"; +/** + * @module LRUCache + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.LRUCache = void 0; +const perf = typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date; +const warned = new Set(); +/* c8 ignore start */ +const PROCESS = (typeof process === 'object' && !!process ? process : {}); +/* c8 ignore start */ +const emitWarning = (msg, type, code, fn) => { + typeof PROCESS.emitWarning === 'function' + ? PROCESS.emitWarning(msg, type, code, fn) + : console.error(`[${code}] ${type}: ${msg}`); +}; +let AC = globalThis.AbortController; +let AS = globalThis.AbortSignal; +/* c8 ignore start */ +if (typeof AC === 'undefined') { + //@ts-ignore + AS = class AbortSignal { + onabort; + _onabort = []; + reason; + aborted = false; + addEventListener(_, fn) { + this._onabort.push(fn); + } + }; + //@ts-ignore + AC = class AbortController { + constructor() { + warnACPolyfill(); + } + signal = new AS(); + abort(reason) { + if (this.signal.aborted) + return; + //@ts-ignore + this.signal.reason = reason; + //@ts-ignore + this.signal.aborted = true; + //@ts-ignore + for (const fn of this.signal._onabort) { + fn(reason); + } + this.signal.onabort?.(reason); + } + }; + let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; + const warnACPolyfill = () => { + if (!printACPolyfillWarning) + return; + printACPolyfillWarning = false; + emitWarning('AbortController is not defined. 
If using lru-cache in ' +
+            'node 14, load an AbortController polyfill from the ' +
+            '`node-abort-controller` package. A minimal polyfill is ' +
+            'provided for use by LRUCache.fetch(), but it should not be ' +
+            'relied upon in other contexts (eg, passing it to other APIs that ' +
+            'use AbortController/AbortSignal might have undesirable effects). ' +
+            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
+    };
+}
+/* c8 ignore stop */
+const shouldWarn = (code) => !warned.has(code);
+const TYPE = Symbol('type');
+const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
+/* c8 ignore start */
+// This is a little bit ridiculous, tbh.
+// The maximum array length is 2^32-1 or thereabouts on most JS impls.
+// And well before that point, you're caching the entire world, I mean,
+// that's ~32GB of just integers for the next/prev links, plus whatever
+// else to hold that many keys and values. Just filling the memory with
+// zeroes at init time is brutal when you get that big.
+// But why not be complete?
+// Maybe in the future, these limits will have expanded.
+const getUintArray = (max) => !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+        ? Uint8Array
+        : max <= Math.pow(2, 16)
+            ? Uint16Array
+            : max <= Math.pow(2, 32)
+                ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER
+                    ? ZeroArray
+                    : null;
+/* c8 ignore stop */
+class ZeroArray extends Array {
+    constructor(size) {
+        super(size);
+        this.fill(0);
+    }
+}
+class Stack {
+    heap;
+    length;
+    // private constructor
+    static #constructing = false;
+    static create(max) {
+        const HeapCls = getUintArray(max);
+        if (!HeapCls)
+            return [];
+        Stack.#constructing = true;
+        const s = new Stack(max, HeapCls);
+        Stack.#constructing = false;
+        return s;
+    }
+    constructor(max, HeapCls) {
+        /* c8 ignore start */
+        if (!Stack.#constructing) {
+            throw new TypeError('instantiate Stack using Stack.create(n)');
+        }
+        /* c8 ignore stop */
+        this.heap = new HeapCls(max);
+        this.length = 0;
+    }
+    push(n) {
+        this.heap[this.length++] = n;
+    }
+    pop() {
+        return this.heap[--this.length];
+    }
+}
+/**
+ * Default export, the thing you're using this module to get.
+ *
+ * All properties from the options object (with the exception of
+ * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
+ * normal public members. (`max` and `maxSize` are read-only getters.)
+ * Changing any of these will alter the defaults for subsequent method calls,
+ * but is otherwise safe.
+ */
+class LRUCache {
+    // Properties coming in from the options object. Of these, only max and
+    // maxSize really *need* to be protected; the rest can be modified, as
+    // they just set defaults for various methods.
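+    // #max and #maxSize sit behind read-only getters, presumably because the
+    // index arrays below are preallocated to max and cannot be resized later.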
+ #max; + #maxSize; + #dispose; + #disposeAfter; + #fetchMethod; + /** + * {@link LRUCache.OptionsBase.ttl} + */ + ttl; + /** + * {@link LRUCache.OptionsBase.ttlResolution} + */ + ttlResolution; + /** + * {@link LRUCache.OptionsBase.ttlAutopurge} + */ + ttlAutopurge; + /** + * {@link LRUCache.OptionsBase.updateAgeOnGet} + */ + updateAgeOnGet; + /** + * {@link LRUCache.OptionsBase.updateAgeOnHas} + */ + updateAgeOnHas; + /** + * {@link LRUCache.OptionsBase.allowStale} + */ + allowStale; + /** + * {@link LRUCache.OptionsBase.noDisposeOnSet} + */ + noDisposeOnSet; + /** + * {@link LRUCache.OptionsBase.noUpdateTTL} + */ + noUpdateTTL; + /** + * {@link LRUCache.OptionsBase.maxEntrySize} + */ + maxEntrySize; + /** + * {@link LRUCache.OptionsBase.sizeCalculation} + */ + sizeCalculation; + /** + * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} + */ + noDeleteOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} + */ + noDeleteOnStaleGet; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} + */ + allowStaleOnFetchAbort; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} + */ + allowStaleOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.ignoreFetchAbort} + */ + ignoreFetchAbort; + // computed properties + #size; + #calculatedSize; + #keyMap; + #keyList; + #valList; + #next; + #prev; + #head; + #tail; + #free; + #disposed; + #sizes; + #starts; + #ttls; + #hasDispose; + #hasFetchMethod; + #hasDisposeAfter; + /** + * Do not call this method unless you need to inspect the + * inner workings of the cache. If anything returned by this + * object is modified in any way, strange breakage may occur. + * + * These fields are private for a reason! + * + * @internal + */ + static unsafeExposeInternals(c) { + return { + // properties + starts: c.#starts, + ttls: c.#ttls, + sizes: c.#sizes, + keyMap: c.#keyMap, + keyList: c.#keyList, + valList: c.#valList, + next: c.#next, + prev: c.#prev, + get head() { + return c.#head; + }, + get tail() { + return c.#tail; + }, + free: c.#free, + // methods + isBackgroundFetch: (p) => c.#isBackgroundFetch(p), + backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), + moveToTail: (index) => c.#moveToTail(index), + indexes: (options) => c.#indexes(options), + rindexes: (options) => c.#rindexes(options), + isStale: (index) => c.#isStale(index), + }; + } + // Protected read-only members + /** + * {@link LRUCache.OptionsBase.max} (read-only) + */ + get max() { + return this.#max; + } + /** + * {@link LRUCache.OptionsBase.maxSize} (read-only) + */ + get maxSize() { + return this.#maxSize; + } + /** + * The total computed size of items in the cache (read-only) + */ + get calculatedSize() { + return this.#calculatedSize; + } + /** + * The number of items stored in the cache (read-only) + */ + get size() { + return this.#size; + } + /** + * {@link LRUCache.OptionsBase.fetchMethod} (read-only) + */ + get fetchMethod() { + return this.#fetchMethod; + } + /** + * {@link LRUCache.OptionsBase.dispose} (read-only) + */ + get dispose() { + return this.#dispose; + } + /** + * {@link LRUCache.OptionsBase.disposeAfter} (read-only) + */ + get disposeAfter() { + return this.#disposeAfter; + } + constructor(options) { + const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, 
allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer'); + } + const UintArray = max ? getUintArray(max) : Array; + if (!UintArray) { + throw new Error('invalid max value: ' + max); + } + this.#max = max; + this.#maxSize = maxSize; + this.maxEntrySize = maxEntrySize || this.#maxSize; + this.sizeCalculation = sizeCalculation; + if (this.sizeCalculation) { + if (!this.#maxSize && !this.maxEntrySize) { + throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function'); + } + } + if (fetchMethod !== undefined && + typeof fetchMethod !== 'function') { + throw new TypeError('fetchMethod must be a function if specified'); + } + this.#fetchMethod = fetchMethod; + this.#hasFetchMethod = !!fetchMethod; + this.#keyMap = new Map(); + this.#keyList = new Array(max).fill(undefined); + this.#valList = new Array(max).fill(undefined); + this.#next = new UintArray(max); + this.#prev = new UintArray(max); + this.#head = 0; + this.#tail = 0; + this.#free = Stack.create(max); + this.#size = 0; + this.#calculatedSize = 0; + if (typeof dispose === 'function') { + this.#dispose = dispose; + } + if (typeof disposeAfter === 'function') { + this.#disposeAfter = disposeAfter; + this.#disposed = []; + } + else { + this.#disposeAfter = undefined; + this.#disposed = undefined; + } + this.#hasDispose = !!this.#dispose; + this.#hasDisposeAfter = !!this.#disposeAfter; + this.noDisposeOnSet = !!noDisposeOnSet; + this.noUpdateTTL = !!noUpdateTTL; + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; + this.ignoreFetchAbort = !!ignoreFetchAbort; + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.#maxSize !== 0) { + if (!isPosInt(this.#maxSize)) { + throw new TypeError('maxSize must be a positive integer if specified'); + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError('maxEntrySize must be a positive integer if specified'); + } + this.#initializeSizeTracking(); + } + this.allowStale = !!allowStale; + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; + this.updateAgeOnGet = !!updateAgeOnGet; + this.updateAgeOnHas = !!updateAgeOnHas; + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1; + this.ttlAutopurge = !!ttlAutopurge; + this.ttl = ttl || 0; + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError('ttl must be a positive integer if specified'); + } + this.#initializeTTLTracking(); + } + // do not allow completely unbounded caches + if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { + throw new TypeError('At least one of max, maxSize, or ttl is required'); + } + if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { + const code = 'LRU_CACHE_UNBOUNDED'; + if (shouldWarn(code)) { + warned.add(code); + const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.'; + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); + } + } + } + /** + * Return the remaining TTL time for a given entry key + */ + getRemainingTTL(key) { + return this.#keyMap.has(key) ? 
Infinity : 0; + } + #initializeTTLTracking() { + const ttls = new ZeroArray(this.#max); + const starts = new ZeroArray(this.#max); + this.#ttls = ttls; + this.#starts = starts; + this.#setItemTTL = (index, ttl, start = perf.now()) => { + starts[index] = ttl !== 0 ? start : 0; + ttls[index] = ttl; + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.#isStale(index)) { + this.delete(this.#keyList[index]); + } + }, ttl + 1); + // unref() not supported on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + }; + this.#updateItemAge = index => { + starts[index] = ttls[index] !== 0 ? perf.now() : 0; + }; + this.#statusTTL = (status, index) => { + if (ttls[index]) { + const ttl = ttls[index]; + const start = starts[index]; + status.ttl = ttl; + status.start = start; + status.now = cachedNow || getNow(); + const age = status.now - start; + status.remainingTTL = ttl - age; + } + }; + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0; + const getNow = () => { + const n = perf.now(); + if (this.ttlResolution > 0) { + cachedNow = n; + const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); + // not available on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + return n; + }; + this.getRemainingTTL = key => { + const index = this.#keyMap.get(key); + if (index === undefined) { + return 0; + } + const ttl = ttls[index]; + const start = starts[index]; + if (ttl === 0 || start === 0) { + return Infinity; + } + const age = (cachedNow || getNow()) - start; + return ttl - age; + }; + this.#isStale = index => { + return (ttls[index] !== 0 && + starts[index] !== 0 && + (cachedNow || getNow()) - starts[index] > ttls[index]); + }; + } + // conditionally set private methods related to TTL + #updateItemAge = () => { }; + #statusTTL = () => { }; + #setItemTTL = () => { }; + /* c8 ignore stop */ + #isStale = () => false; + #initializeSizeTracking() { + const sizes = new ZeroArray(this.#max); + this.#calculatedSize = 0; + this.#sizes = sizes; + this.#removeItemSize = index => { + this.#calculatedSize -= sizes[index]; + sizes[index] = 0; + }; + this.#requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.#isBackgroundFetch(v)) { + return 0; + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function'); + } + size = sizeCalculation(v, k); + if (!isPosInt(size)) { + throw new TypeError('sizeCalculation return invalid (expect positive integer)'); + } + } + else { + throw new TypeError('invalid size value (must be positive integer). 
' + + 'When maxSize or maxEntrySize is used, sizeCalculation ' + + 'or size must be set.'); + } + } + return size; + }; + this.#addItemSize = (index, size, status) => { + sizes[index] = size; + if (this.#maxSize) { + const maxSize = this.#maxSize - sizes[index]; + while (this.#calculatedSize > maxSize) { + this.#evict(true); + } + } + this.#calculatedSize += sizes[index]; + if (status) { + status.entrySize = size; + status.totalCalculatedSize = this.#calculatedSize; + } + }; + } + #removeItemSize = _i => { }; + #addItemSize = (_i, _s, _st) => { }; + #requireSize = (_k, _v, size, sizeCalculation) => { + if (size || sizeCalculation) { + throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); + } + return 0; + }; + *#indexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#tail; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#head) { + break; + } + else { + i = this.#prev[i]; + } + } + } + } + *#rindexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#head; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#tail) { + break; + } + else { + i = this.#next[i]; + } + } + } + } + #isValidIndex(index) { + return (index !== undefined && + this.#keyMap.get(this.#keyList[index]) === index); + } + /** + * Return a generator yielding `[key, value]` pairs, + * in order from most recently used to least recently used. + */ + *entries() { + for (const i of this.#indexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Inverse order version of {@link LRUCache.entries} + * + * Return a generator yielding `[key, value]` pairs, + * in order from least recently used to most recently used. + */ + *rentries() { + for (const i of this.#rindexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Return a generator yielding the keys in the cache, + * in order from most recently used to least recently used. + */ + *keys() { + for (const i of this.#indexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Inverse order version of {@link LRUCache.keys} + * + * Return a generator yielding the keys in the cache, + * in order from least recently used to most recently used. + */ + *rkeys() { + for (const i of this.#rindexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Return a generator yielding the values in the cache, + * in order from most recently used to least recently used. + */ + *values() { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Inverse order version of {@link LRUCache.values} + * + * Return a generator yielding the values in the cache, + * in order from least recently used to most recently used. 
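+     * (Values still being fetched in the background are skipped until
+     * they resolve.)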
+     */
+    *rvalues() {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Iterating over the cache itself yields the same results as
+     * {@link LRUCache.entries}
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
+    /**
+     * Find a value for which the supplied fn method returns a truthy value,
+     * similar to Array.find(). fn is called as fn(value, key, cache).
+     */
+    find(fn, getOptions = {}) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
+            if (value === undefined)
+                continue;
+            if (fn(value, this.#keyList[i], this)) {
+                return this.get(this.#keyList[i], getOptions);
+            }
+        }
+    }
+    /**
+     * Call the supplied function on each item in the cache, in order from
+     * most recently used to least recently used. fn is called as
+     * fn(value, key, cache). Does not update age or recency of use.
+     * Does not iterate over stale values.
+     */
+    forEach(fn, thisp = this) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * The same as {@link LRUCache.forEach} but items are iterated over in
+     * reverse order. (ie, less recently used items are iterated over first.)
+     */
+    rforEach(fn, thisp = this) {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * Delete any stale entries. Returns true if anything was removed,
+     * false otherwise.
+     */
+    purgeStale() {
+        let deleted = false;
+        for (const i of this.#rindexes({ allowStale: true })) {
+            if (this.#isStale(i)) {
+                this.delete(this.#keyList[i]);
+                deleted = true;
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
+     * passed to cache.load()
+     */
+    dump() {
+        const arr = [];
+        for (const i of this.#indexes({ allowStale: true })) {
+            const key = this.#keyList[i];
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
+            if (value === undefined || key === undefined)
+                continue;
+            const entry = { value };
+            if (this.#ttls && this.#starts) {
+                entry.ttl = this.#ttls[i];
+                // always dump the start relative to a portable timestamp
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = perf.now() - this.#starts[i];
+                entry.start = Math.floor(Date.now() - age);
+            }
+            if (this.#sizes) {
+                entry.size = this.#sizes[i];
+            }
+            arr.unshift([key, entry]);
+        }
+        return arr;
+    }
+    /**
+     * Reset the cache and load in the items in entries in the order listed.
+     * Note that the shape of the resulting cache may be different if the
+     * same options are not used in both caches.
+     */
+    load(arr) {
+        this.clear();
+        for (const [key, entry] of arr) {
+            if (entry.start) {
+                // entry.start is a portable timestamp, but we may be using
+                // node's performance.now(), so calculate the offset, so that
+                // we get the intended remaining TTL, no matter how long it's
+                // been on ice.
+                //
+                // it's ok for this to be a bit slow, it's a rare operation.
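+                // convert the portable Date.now()-based start back into the
+                // perf.now() timebase used internally, preserving the
+                // remaining TTL.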
+ const age = Date.now() - entry.start; + entry.start = perf.now() - age; + } + this.set(key, entry.value, entry); + } + } + /** + * Add a value to the cache. + * + * Note: if `undefined` is specified as a value, this is an alias for + * {@link LRUCache#delete} + */ + set(k, v, setOptions = {}) { + if (v === undefined) { + this.delete(k); + return this; + } + const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; + let { noUpdateTTL = this.noUpdateTTL } = setOptions; + const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss'; + status.maxEntrySizeExceeded = true; + } + // have to delete, in case something is there already. + this.delete(k); + return this; + } + let index = this.#size === 0 ? undefined : this.#keyMap.get(k); + if (index === undefined) { + // addition + index = (this.#size === 0 + ? this.#tail + : this.#free.length !== 0 + ? this.#free.pop() + : this.#size === this.#max + ? this.#evict(false) + : this.#size); + this.#keyList[index] = k; + this.#valList[index] = v; + this.#keyMap.set(k, index); + this.#next[this.#tail] = index; + this.#prev[index] = this.#tail; + this.#tail = index; + this.#size++; + this.#addItemSize(index, size, status); + if (status) + status.set = 'add'; + noUpdateTTL = false; + } + else { + // update + this.#moveToTail(index); + const oldVal = this.#valList[index]; + if (v !== oldVal) { + if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')); + const { __staleWhileFetching: s } = oldVal; + if (s !== undefined && !noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(s, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([s, k, 'set']); + } + } + } + else if (!noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(oldVal, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([oldVal, k, 'set']); + } + } + this.#removeItemSize(index); + this.#addItemSize(index, size, status); + this.#valList[index] = v; + if (status) { + status.set = 'replace'; + const oldValue = oldVal && this.#isBackgroundFetch(oldVal) + ? oldVal.__staleWhileFetching + : oldVal; + if (oldValue !== undefined) + status.oldValue = oldValue; + } + } + else if (status) { + status.set = 'update'; + } + } + if (ttl !== 0 && !this.#ttls) { + this.#initializeTTLTracking(); + } + if (this.#ttls) { + if (!noUpdateTTL) { + this.#setItemTTL(index, ttl, start); + } + if (status) + this.#statusTTL(status, index); + } + if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return this; + } + /** + * Evict the least recently used item, returning its value or + * `undefined` if cache is empty. 
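+     * (A background fetch is unwrapped to its stale value when one is
+     * available, and any queued disposeAfter callbacks are drained.)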
+ */ + pop() { + try { + while (this.#size) { + const val = this.#valList[this.#head]; + this.#evict(true); + if (this.#isBackgroundFetch(val)) { + if (val.__staleWhileFetching) { + return val.__staleWhileFetching; + } + } + else if (val !== undefined) { + return val; + } + } + } + finally { + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } + } + #evict(free) { + const head = this.#head; + const k = this.#keyList[head]; + const v = this.#valList[head]; + if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'evict'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'evict']); + } + } + this.#removeItemSize(head); + // if we aren't about to use the index, then null these out + if (free) { + this.#keyList[head] = undefined; + this.#valList[head] = undefined; + this.#free.push(head); + } + if (this.#size === 1) { + this.#head = this.#tail = 0; + this.#free.length = 0; + } + else { + this.#head = this.#next[head]; + } + this.#keyMap.delete(k); + this.#size--; + return head; + } + /** + * Check if a key is in the cache, without updating the recency of use. + * Will return false if the item is stale, even though it is technically + * in the cache. + * + * Will not update item age unless + * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. + */ + has(k, hasOptions = {}) { + const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v) && + v.__staleWhileFetching === undefined) { + return false; + } + if (!this.#isStale(index)) { + if (updateAgeOnHas) { + this.#updateItemAge(index); + } + if (status) { + status.has = 'hit'; + this.#statusTTL(status, index); + } + return true; + } + else if (status) { + status.has = 'stale'; + this.#statusTTL(status, index); + } + } + else if (status) { + status.has = 'miss'; + } + return false; + } + /** + * Like {@link LRUCache#get} but doesn't update recency or delete stale + * items. + * + * Returns `undefined` if the item is stale, unless + * {@link LRUCache.OptionsBase.allowStale} is set. + */ + peek(k, peekOptions = {}) { + const { allowStale = this.allowStale } = peekOptions; + const index = this.#keyMap.get(k); + if (index !== undefined && + (allowStale || !this.#isStale(index))) { + const v = this.#valList[index]; + // either stale and allowed, or forcing a refresh of non-stale value + return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + } + } + #backgroundFetch(k, index, options, context) { + const v = index === undefined ? undefined : this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + return v; + } + const ac = new AC(); + const { signal } = options; + // when/if our AC signals, then stop listening to theirs. 
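+        // the listener is registered with our own signal, so it is removed
+        // automatically once our controller aborts.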
+ signal?.addEventListener('abort', () => ac.abort(signal.reason), { + signal: ac.signal, + }); + const fetchOpts = { + signal: ac.signal, + options, + context, + }; + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal; + const ignoreAbort = options.ignoreFetchAbort && v !== undefined; + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true; + options.status.fetchError = ac.signal.reason; + if (ignoreAbort) + options.status.fetchAbortIgnored = true; + } + else { + options.status.fetchResolved = true; + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason); + } + // either we didn't abort, and are still here, or we did, and ignored + const bf = p; + if (this.#valList[index] === p) { + if (v === undefined) { + if (bf.__staleWhileFetching) { + this.#valList[index] = bf.__staleWhileFetching; + } + else { + this.delete(k); + } + } + else { + if (options.status) + options.status.fetchUpdated = true; + this.set(k, v, fetchOpts.options); + } + } + return v; + }; + const eb = (er) => { + if (options.status) { + options.status.fetchRejected = true; + options.status.fetchError = er; + } + return fetchFail(er); + }; + const fetchFail = (er) => { + const { aborted } = ac.signal; + const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; + const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; + const noDelete = allowStale || options.noDeleteOnFetchRejection; + const bf = p; + if (this.#valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || bf.__staleWhileFetching === undefined; + if (del) { + this.delete(k); + } + else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.#valList[index] = bf.__staleWhileFetching; + } + } + if (allowStale) { + if (options.status && bf.__staleWhileFetching !== undefined) { + options.status.returnedStale = true; + } + return bf.__staleWhileFetching; + } + else if (bf.__returned === bf) { + throw er; + } + }; + const pcall = (res, rej) => { + const fmp = this.#fetchMethod?.(k, v, fetchOpts); + if (fmp && fmp instanceof Promise) { + fmp.then(v => res(v === undefined ? undefined : v), rej); + } + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if (!options.ignoreFetchAbort || + options.allowStaleOnFetchAbort) { + res(undefined); + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true); + } + } + }); + }; + if (options.status) + options.status.fetchDispatched = true; + const p = new Promise(pcall).then(cb, eb); + const bf = Object.assign(p, { + __abortController: ac, + __staleWhileFetching: v, + __returned: undefined, + }); + if (index === undefined) { + // internal, don't expose status. 
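+            // the pending promise itself is stored in the cache slot; any
+            // previous value rides along as __staleWhileFetching.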
+ this.set(k, bf, { ...fetchOpts.options, status: undefined }); + index = this.#keyMap.get(k); + } + else { + this.#valList[index] = bf; + } + return bf; + } + #isBackgroundFetch(p) { + if (!this.#hasFetchMethod) + return false; + const b = p; + return (!!b && + b instanceof Promise && + b.hasOwnProperty('__staleWhileFetching') && + b.__abortController instanceof AC); + } + async fetch(k, fetchOptions = {}) { + const { + // get options + allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; + if (!this.#hasFetchMethod) { + if (status) + status.fetch = 'get'; + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }); + } + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + }; + let index = this.#keyMap.get(k); + if (index === undefined) { + if (status) + status.fetch = 'miss'; + const p = this.#backgroundFetch(k, index, options, context); + return (p.__returned = p); + } + else { + // in cache, maybe already fetching + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + const stale = allowStale && v.__staleWhileFetching !== undefined; + if (status) { + status.fetch = 'inflight'; + if (stale) + status.returnedStale = true; + } + return stale ? v.__staleWhileFetching : (v.__returned = v); + } + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.#isStale(index); + if (!forceRefresh && !isStale) { + if (status) + status.fetch = 'hit'; + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + if (status) + this.#statusTTL(status, index); + return v; + } + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.#backgroundFetch(k, index, options, context); + const hasStale = p.__staleWhileFetching !== undefined; + const staleVal = hasStale && allowStale; + if (status) { + status.fetch = isStale ? 'stale' : 'refresh'; + if (staleVal && isStale) + status.returnedStale = true; + } + return staleVal ? p.__staleWhileFetching : (p.__returned = p); + } + } + /** + * Return a value from the cache. Will update the recency of the cache + * entry found. + * + * If the key is not found, get() will return `undefined`. 
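+     * (A stale entry is deleted on read unless noDeleteOnStaleGet is set,
+     * and its value is returned only when allowStale is in effect.)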
+ */ + get(k, getOptions = {}) { + const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const value = this.#valList[index]; + const fetching = this.#isBackgroundFetch(value); + if (status) + this.#statusTTL(status, index); + if (this.#isStale(index)) { + if (status) + status.get = 'stale'; + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k); + } + if (status && allowStale) + status.returnedStale = true; + return allowStale ? value : undefined; + } + else { + if (status && + allowStale && + value.__staleWhileFetching !== undefined) { + status.returnedStale = true; + } + return allowStale ? value.__staleWhileFetching : undefined; + } + } + else { + if (status) + status.get = 'hit'; + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching; + } + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + return value; + } + } + else if (status) { + status.get = 'miss'; + } + } + #connect(p, n) { + this.#prev[n] = p; + this.#next[p] = n; + } + #moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.#tail) { + if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#connect(this.#prev[index], this.#next[index]); + } + this.#connect(this.#tail, index); + this.#tail = index; + } + } + /** + * Deletes a key out of the cache. + * Returns true if the key was deleted, false otherwise. + */ + delete(k) { + let deleted = false; + if (this.#size !== 0) { + const index = this.#keyMap.get(k); + if (index !== undefined) { + deleted = true; + if (this.#size === 1) { + this.clear(); + } + else { + this.#removeItemSize(index); + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + this.#keyMap.delete(k); + this.#keyList[index] = undefined; + this.#valList[index] = undefined; + if (index === this.#tail) { + this.#tail = this.#prev[index]; + } + else if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#next[this.#prev[index]] = this.#next[index]; + this.#prev[this.#next[index]] = this.#prev[index]; + } + this.#size--; + this.#free.push(index); + } + } + } + if (this.#hasDisposeAfter && this.#disposed?.length) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return deleted; + } + /** + * Clear the cache entirely, throwing away all values. 
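+     * (dispose and disposeAfter handlers, when configured, are invoked for
+     * every discarded entry with 'delete' as the reason.)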
+ */ + clear() { + for (const index of this.#rindexes({ allowStale: true })) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else { + const k = this.#keyList[index]; + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + } + this.#keyMap.clear(); + this.#valList.fill(undefined); + this.#keyList.fill(undefined); + if (this.#ttls && this.#starts) { + this.#ttls.fill(0); + this.#starts.fill(0); + } + if (this.#sizes) { + this.#sizes.fill(0); + } + this.#head = 0; + this.#tail = 0; + this.#free.length = 0; + this.#calculatedSize = 0; + this.#size = 0; + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } +} +exports.LRUCache = LRUCache; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.min.js b/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.min.js new file mode 100644 index 0000000000000..8d34a03041d25 --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.min.js @@ -0,0 +1,2 @@ +"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=R.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#W(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};exports.LRUCache=C; +//# sourceMappingURL=index.min.js.map diff --git a/node_modules/cacache/node_modules/lru-cache/dist/cjs/package.json b/node_modules/cacache/node_modules/lru-cache/dist/cjs/package.json new file mode 100644 index 0000000000000..5bbefffbabee3 --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/dist/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.js b/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.js new file mode 100644 index 0000000000000..23b9754ad6c76 --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.js @@ -0,0 +1,1400 @@ +/** + * @module LRUCache + */ +const perf = typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date; +const warned = new Set(); +/* c8 ignore start */ +const PROCESS = (typeof process === 'object' && !!process ? process : {}); +/* c8 ignore start */ +const emitWarning = (msg, type, code, fn) => { + typeof PROCESS.emitWarning === 'function' + ? PROCESS.emitWarning(msg, type, code, fn) + : console.error(`[${code}] ${type}: ${msg}`); +}; +let AC = globalThis.AbortController; +let AS = globalThis.AbortSignal; +/* c8 ignore start */ +if (typeof AC === 'undefined') { + //@ts-ignore + AS = class AbortSignal { + onabort; + _onabort = []; + reason; + aborted = false; + addEventListener(_, fn) { + this._onabort.push(fn); + } + }; + //@ts-ignore + AC = class AbortController { + constructor() { + warnACPolyfill(); + } + signal = new AS(); + abort(reason) { + if (this.signal.aborted) + return; + //@ts-ignore + this.signal.reason = reason; + //@ts-ignore + this.signal.aborted = true; + //@ts-ignore + for (const fn of this.signal._onabort) { + fn(reason); + } + this.signal.onabort?.(reason); + } + }; + let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; + const warnACPolyfill = () => { + if (!printACPolyfillWarning) + return; + printACPolyfillWarning = false; + emitWarning('AbortController is not defined. If using lru-cache in ' + + 'node 14, load an AbortController polyfill from the ' + + '`node-abort-controller` package. A minimal polyfill is ' + + 'provided for use by LRUCache.fetch(), but it should not be ' + + 'relied upon in other contexts (eg, passing it to other APIs that ' + + 'use AbortController/AbortSignal might have undesirable effects). 
' + 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); + }; +} +/* c8 ignore stop */ +const shouldWarn = (code) => !warned.has(code); +const TYPE = Symbol('type'); +const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); +/* c8 ignore start */ +// This is a little bit ridiculous, tbh. +// The maximum array length is 2^32-1 or thereabouts on most JS impls. +// And well before that point, you're caching the entire world, I mean, +// that's ~32GB of just integers for the next/prev links, plus whatever +// else to hold that many keys and values. Just filling the memory with +// zeroes at init time is brutal when you get that big. +// But why not be complete? +// Maybe in the future, these limits will have expanded. +const getUintArray = (max) => !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null; +/* c8 ignore stop */ +class ZeroArray extends Array { + constructor(size) { + super(size); + this.fill(0); + } +} +class Stack { + heap; + length; + // private constructor + static #constructing = false; + static create(max) { + const HeapCls = getUintArray(max); + if (!HeapCls) + return []; + Stack.#constructing = true; + const s = new Stack(max, HeapCls); + Stack.#constructing = false; + return s; + } + constructor(max, HeapCls) { + /* c8 ignore start */ + if (!Stack.#constructing) { + throw new TypeError('instantiate Stack using Stack.create(n)'); + } + /* c8 ignore stop */ + this.heap = new HeapCls(max); + this.length = 0; + } + push(n) { + this.heap[this.length++] = n; + } + pop() { + return this.heap[--this.length]; + } +} +/** + * Default export, the thing you're using this module to get. + * + * All properties from the options object (with the exception of + * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as + * normal public members. (`max` and `maxSize` are read-only getters.) + * Changing any of these will alter the defaults for subsequent method calls, + * but is otherwise safe. + */ +export class LRUCache { + // properties coming in from the options of these, only max and maxSize + // really *need* to be protected. The rest can be modified, as they just + // set defaults for various methods.
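To make the options-as-members behavior described in the class doc concrete, a minimal usage sketch (assuming the package is consumed as `lru-cache`; option values are illustrative only):

    import { LRUCache } from 'lru-cache'

    // The constructor requires at least one of max, maxSize, or ttl.
    const cache = new LRUCache({ max: 500, ttl: 60_000 })

    // Most options become plain public members; reassigning one changes
    // the default for subsequent method calls.
    cache.allowStale = true

    // max and maxSize are getter-only: assigning to them throws in strict
    // mode rather than resizing the cache.
    console.log(cache.max) // 500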
+ #max; + #maxSize; + #dispose; + #disposeAfter; + #fetchMethod; + /** + * {@link LRUCache.OptionsBase.ttl} + */ + ttl; + /** + * {@link LRUCache.OptionsBase.ttlResolution} + */ + ttlResolution; + /** + * {@link LRUCache.OptionsBase.ttlAutopurge} + */ + ttlAutopurge; + /** + * {@link LRUCache.OptionsBase.updateAgeOnGet} + */ + updateAgeOnGet; + /** + * {@link LRUCache.OptionsBase.updateAgeOnHas} + */ + updateAgeOnHas; + /** + * {@link LRUCache.OptionsBase.allowStale} + */ + allowStale; + /** + * {@link LRUCache.OptionsBase.noDisposeOnSet} + */ + noDisposeOnSet; + /** + * {@link LRUCache.OptionsBase.noUpdateTTL} + */ + noUpdateTTL; + /** + * {@link LRUCache.OptionsBase.maxEntrySize} + */ + maxEntrySize; + /** + * {@link LRUCache.OptionsBase.sizeCalculation} + */ + sizeCalculation; + /** + * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} + */ + noDeleteOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} + */ + noDeleteOnStaleGet; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} + */ + allowStaleOnFetchAbort; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} + */ + allowStaleOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.ignoreFetchAbort} + */ + ignoreFetchAbort; + // computed properties + #size; + #calculatedSize; + #keyMap; + #keyList; + #valList; + #next; + #prev; + #head; + #tail; + #free; + #disposed; + #sizes; + #starts; + #ttls; + #hasDispose; + #hasFetchMethod; + #hasDisposeAfter; + /** + * Do not call this method unless you need to inspect the + * inner workings of the cache. If anything returned by this + * object is modified in any way, strange breakage may occur. + * + * These fields are private for a reason! + * + * @internal + */ + static unsafeExposeInternals(c) { + return { + // properties + starts: c.#starts, + ttls: c.#ttls, + sizes: c.#sizes, + keyMap: c.#keyMap, + keyList: c.#keyList, + valList: c.#valList, + next: c.#next, + prev: c.#prev, + get head() { + return c.#head; + }, + get tail() { + return c.#tail; + }, + free: c.#free, + // methods + isBackgroundFetch: (p) => c.#isBackgroundFetch(p), + backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), + moveToTail: (index) => c.#moveToTail(index), + indexes: (options) => c.#indexes(options), + rindexes: (options) => c.#rindexes(options), + isStale: (index) => c.#isStale(index), + }; + } + // Protected read-only members + /** + * {@link LRUCache.OptionsBase.max} (read-only) + */ + get max() { + return this.#max; + } + /** + * {@link LRUCache.OptionsBase.maxSize} (read-only) + */ + get maxSize() { + return this.#maxSize; + } + /** + * The total computed size of items in the cache (read-only) + */ + get calculatedSize() { + return this.#calculatedSize; + } + /** + * The number of items stored in the cache (read-only) + */ + get size() { + return this.#size; + } + /** + * {@link LRUCache.OptionsBase.fetchMethod} (read-only) + */ + get fetchMethod() { + return this.#fetchMethod; + } + /** + * {@link LRUCache.OptionsBase.dispose} (read-only) + */ + get dispose() { + return this.#dispose; + } + /** + * {@link LRUCache.OptionsBase.disposeAfter} (read-only) + */ + get disposeAfter() { + return this.#disposeAfter; + } + constructor(options) { + const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, 
allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer'); + } + const UintArray = max ? getUintArray(max) : Array; + if (!UintArray) { + throw new Error('invalid max value: ' + max); + } + this.#max = max; + this.#maxSize = maxSize; + this.maxEntrySize = maxEntrySize || this.#maxSize; + this.sizeCalculation = sizeCalculation; + if (this.sizeCalculation) { + if (!this.#maxSize && !this.maxEntrySize) { + throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function'); + } + } + if (fetchMethod !== undefined && + typeof fetchMethod !== 'function') { + throw new TypeError('fetchMethod must be a function if specified'); + } + this.#fetchMethod = fetchMethod; + this.#hasFetchMethod = !!fetchMethod; + this.#keyMap = new Map(); + this.#keyList = new Array(max).fill(undefined); + this.#valList = new Array(max).fill(undefined); + this.#next = new UintArray(max); + this.#prev = new UintArray(max); + this.#head = 0; + this.#tail = 0; + this.#free = Stack.create(max); + this.#size = 0; + this.#calculatedSize = 0; + if (typeof dispose === 'function') { + this.#dispose = dispose; + } + if (typeof disposeAfter === 'function') { + this.#disposeAfter = disposeAfter; + this.#disposed = []; + } + else { + this.#disposeAfter = undefined; + this.#disposed = undefined; + } + this.#hasDispose = !!this.#dispose; + this.#hasDisposeAfter = !!this.#disposeAfter; + this.noDisposeOnSet = !!noDisposeOnSet; + this.noUpdateTTL = !!noUpdateTTL; + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; + this.ignoreFetchAbort = !!ignoreFetchAbort; + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.#maxSize !== 0) { + if (!isPosInt(this.#maxSize)) { + throw new TypeError('maxSize must be a positive integer if specified'); + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError('maxEntrySize must be a positive integer if specified'); + } + this.#initializeSizeTracking(); + } + this.allowStale = !!allowStale; + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; + this.updateAgeOnGet = !!updateAgeOnGet; + this.updateAgeOnHas = !!updateAgeOnHas; + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1; + this.ttlAutopurge = !!ttlAutopurge; + this.ttl = ttl || 0; + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError('ttl must be a positive integer if specified'); + } + this.#initializeTTLTracking(); + } + // do not allow completely unbounded caches + if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { + throw new TypeError('At least one of max, maxSize, or ttl is required'); + } + if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { + const code = 'LRU_CACHE_UNBOUNDED'; + if (shouldWarn(code)) { + warned.add(code); + const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.'; + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); + } + } + } + /** + * Return the remaining TTL time for a given entry key + */ + getRemainingTTL(key) { + return this.#keyMap.has(key) ? 
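// Default stub: if TTL tracking was never initialized, an entry that
// exists never expires (Infinity) and a missing key reports 0.
// #initializeTTLTracking() below replaces this method with a version
// that computes the actual remaining time.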
Infinity : 0; + } + #initializeTTLTracking() { + const ttls = new ZeroArray(this.#max); + const starts = new ZeroArray(this.#max); + this.#ttls = ttls; + this.#starts = starts; + this.#setItemTTL = (index, ttl, start = perf.now()) => { + starts[index] = ttl !== 0 ? start : 0; + ttls[index] = ttl; + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.#isStale(index)) { + this.delete(this.#keyList[index]); + } + }, ttl + 1); + // unref() not supported on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + }; + this.#updateItemAge = index => { + starts[index] = ttls[index] !== 0 ? perf.now() : 0; + }; + this.#statusTTL = (status, index) => { + if (ttls[index]) { + const ttl = ttls[index]; + const start = starts[index]; + status.ttl = ttl; + status.start = start; + status.now = cachedNow || getNow(); + const age = status.now - start; + status.remainingTTL = ttl - age; + } + }; + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0; + const getNow = () => { + const n = perf.now(); + if (this.ttlResolution > 0) { + cachedNow = n; + const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); + // not available on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + return n; + }; + this.getRemainingTTL = key => { + const index = this.#keyMap.get(key); + if (index === undefined) { + return 0; + } + const ttl = ttls[index]; + const start = starts[index]; + if (ttl === 0 || start === 0) { + return Infinity; + } + const age = (cachedNow || getNow()) - start; + return ttl - age; + }; + this.#isStale = index => { + return (ttls[index] !== 0 && + starts[index] !== 0 && + (cachedNow || getNow()) - starts[index] > ttls[index]); + }; + } + // conditionally set private methods related to TTL + #updateItemAge = () => { }; + #statusTTL = () => { }; + #setItemTTL = () => { }; + /* c8 ignore stop */ + #isStale = () => false; + #initializeSizeTracking() { + const sizes = new ZeroArray(this.#max); + this.#calculatedSize = 0; + this.#sizes = sizes; + this.#removeItemSize = index => { + this.#calculatedSize -= sizes[index]; + sizes[index] = 0; + }; + this.#requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.#isBackgroundFetch(v)) { + return 0; + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function'); + } + size = sizeCalculation(v, k); + if (!isPosInt(size)) { + throw new TypeError('sizeCalculation return invalid (expect positive integer)'); + } + } + else { + throw new TypeError('invalid size value (must be positive integer). 
' + + 'When maxSize or maxEntrySize is used, sizeCalculation ' + + 'or size must be set.'); + } + } + return size; + }; + this.#addItemSize = (index, size, status) => { + sizes[index] = size; + if (this.#maxSize) { + const maxSize = this.#maxSize - sizes[index]; + while (this.#calculatedSize > maxSize) { + this.#evict(true); + } + } + this.#calculatedSize += sizes[index]; + if (status) { + status.entrySize = size; + status.totalCalculatedSize = this.#calculatedSize; + } + }; + } + #removeItemSize = _i => { }; + #addItemSize = (_i, _s, _st) => { }; + #requireSize = (_k, _v, size, sizeCalculation) => { + if (size || sizeCalculation) { + throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); + } + return 0; + }; + *#indexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#tail; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#head) { + break; + } + else { + i = this.#prev[i]; + } + } + } + } + *#rindexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#head; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#tail) { + break; + } + else { + i = this.#next[i]; + } + } + } + } + #isValidIndex(index) { + return (index !== undefined && + this.#keyMap.get(this.#keyList[index]) === index); + } + /** + * Return a generator yielding `[key, value]` pairs, + * in order from most recently used to least recently used. + */ + *entries() { + for (const i of this.#indexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Inverse order version of {@link LRUCache.entries} + * + * Return a generator yielding `[key, value]` pairs, + * in order from least recently used to most recently used. + */ + *rentries() { + for (const i of this.#rindexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Return a generator yielding the keys in the cache, + * in order from most recently used to least recently used. + */ + *keys() { + for (const i of this.#indexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Inverse order version of {@link LRUCache.keys} + * + * Return a generator yielding the keys in the cache, + * in order from least recently used to most recently used. + */ + *rkeys() { + for (const i of this.#rindexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Return a generator yielding the values in the cache, + * in order from most recently used to least recently used. + */ + *values() { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Inverse order version of {@link LRUCache.values} + * + * Return a generator yielding the values in the cache, + * in order from least recently used to most recently used. 
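For example, a short sketch of the paired iteration orders (keys and values illustrative):

    const c = new LRUCache({ max: 3 })
    c.set('a', 1).set('b', 2).set('c', 3)
    c.get('a')                    // touching 'a' makes it most recently used
    console.log([...c.values()])  // [1, 3, 2]  most -> least recently used
    console.log([...c.rvalues()]) // [2, 3, 1]  least -> most recently used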
+ */ + *rvalues() { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Iterating over the cache itself yields the same results as + * {@link LRUCache.entries} + */ + [Symbol.iterator]() { + return this.entries(); + } + /** + * Find a value for which the supplied fn method returns a truthy value, + * similar to Array.find(). fn is called as fn(value, key, cache). + */ + find(fn, getOptions = {}) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + if (fn(value, this.#keyList[i], this)) { + return this.get(this.#keyList[i], getOptions); + } + } + } + /** + * Call the supplied function on each item in the cache, in order from + * most recently used to least recently used. fn is called as + * fn(value, key, cache). Does not update age or recency of use. + * Does not iterate over stale values. + */ + forEach(fn, thisp = this) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * The same as {@link LRUCache.forEach} but items are iterated over in + * reverse order. (ie, less recently used items are iterated over first.) + */ + rforEach(fn, thisp = this) { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * Delete any stale entries. Returns true if anything was removed, + * false otherwise. + */ + purgeStale() { + let deleted = false; + for (const i of this.#rindexes({ allowStale: true })) { + if (this.#isStale(i)) { + this.delete(this.#keyList[i]); + deleted = true; + } + } + return deleted; + } + /** + * Return an array of [key, {@link LRUCache.Entry}] tuples which can be + * passed to cache.load() + */ + dump() { + const arr = []; + for (const i of this.#indexes({ allowStale: true })) { + const key = this.#keyList[i]; + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v; + if (value === undefined || key === undefined) + continue; + const entry = { value }; + if (this.#ttls && this.#starts) { + entry.ttl = this.#ttls[i]; + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.#starts[i]; + entry.start = Math.floor(Date.now() - age); + } + if (this.#sizes) { + entry.size = this.#sizes[i]; + } + arr.unshift([key, entry]); + } + return arr; + } + /** + * Reset the cache and load in the items in entries in the order listed. + * Note that the shape of the resulting cache may be different if the + * same options are not used in both caches. + */ + load(arr) { + this.clear(); + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset, so that + // we get the intended remaining TTL, no matter how long it's + // been on ice. + // + // it's ok for this to be a bit slow, it's a rare operation.
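// Illustrative numbers: an entry dumped 5 seconds ago has
// age = Date.now() - entry.start = 5000, so rebasing with
// entry.start = perf.now() - 5000 restores the same remaining TTL
// against the (possibly monotonic) perf clock.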
+ const age = Date.now() - entry.start; + entry.start = perf.now() - age; + } + this.set(key, entry.value, entry); + } + } + /** + * Add a value to the cache. + * + * Note: if `undefined` is specified as a value, this is an alias for + * {@link LRUCache#delete} + */ + set(k, v, setOptions = {}) { + if (v === undefined) { + this.delete(k); + return this; + } + const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; + let { noUpdateTTL = this.noUpdateTTL } = setOptions; + const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss'; + status.maxEntrySizeExceeded = true; + } + // have to delete, in case something is there already. + this.delete(k); + return this; + } + let index = this.#size === 0 ? undefined : this.#keyMap.get(k); + if (index === undefined) { + // addition + index = (this.#size === 0 + ? this.#tail + : this.#free.length !== 0 + ? this.#free.pop() + : this.#size === this.#max + ? this.#evict(false) + : this.#size); + this.#keyList[index] = k; + this.#valList[index] = v; + this.#keyMap.set(k, index); + this.#next[this.#tail] = index; + this.#prev[index] = this.#tail; + this.#tail = index; + this.#size++; + this.#addItemSize(index, size, status); + if (status) + status.set = 'add'; + noUpdateTTL = false; + } + else { + // update + this.#moveToTail(index); + const oldVal = this.#valList[index]; + if (v !== oldVal) { + if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')); + const { __staleWhileFetching: s } = oldVal; + if (s !== undefined && !noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(s, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([s, k, 'set']); + } + } + } + else if (!noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(oldVal, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([oldVal, k, 'set']); + } + } + this.#removeItemSize(index); + this.#addItemSize(index, size, status); + this.#valList[index] = v; + if (status) { + status.set = 'replace'; + const oldValue = oldVal && this.#isBackgroundFetch(oldVal) + ? oldVal.__staleWhileFetching + : oldVal; + if (oldValue !== undefined) + status.oldValue = oldValue; + } + } + else if (status) { + status.set = 'update'; + } + } + if (ttl !== 0 && !this.#ttls) { + this.#initializeTTLTracking(); + } + if (this.#ttls) { + if (!noUpdateTTL) { + this.#setItemTTL(index, ttl, start); + } + if (status) + this.#statusTTL(status, index); + } + if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return this; + } + /** + * Evict the least recently used item, returning its value or + * `undefined` if cache is empty. 
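A short sketch of draining the cache via pop() (values illustrative):

    const c = new LRUCache({ max: 2 })
    c.set('old', 1).set('new', 2)
    c.pop() // => 1, the least recently used value
    c.pop() // => 2
    c.pop() // => undefined once the cache is empty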
+ */ + pop() { + try { + while (this.#size) { + const val = this.#valList[this.#head]; + this.#evict(true); + if (this.#isBackgroundFetch(val)) { + if (val.__staleWhileFetching) { + return val.__staleWhileFetching; + } + } + else if (val !== undefined) { + return val; + } + } + } + finally { + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } + } + #evict(free) { + const head = this.#head; + const k = this.#keyList[head]; + const v = this.#valList[head]; + if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'evict'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'evict']); + } + } + this.#removeItemSize(head); + // if we aren't about to use the index, then null these out + if (free) { + this.#keyList[head] = undefined; + this.#valList[head] = undefined; + this.#free.push(head); + } + if (this.#size === 1) { + this.#head = this.#tail = 0; + this.#free.length = 0; + } + else { + this.#head = this.#next[head]; + } + this.#keyMap.delete(k); + this.#size--; + return head; + } + /** + * Check if a key is in the cache, without updating the recency of use. + * Will return false if the item is stale, even though it is technically + * in the cache. + * + * Will not update item age unless + * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. + */ + has(k, hasOptions = {}) { + const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v) && + v.__staleWhileFetching === undefined) { + return false; + } + if (!this.#isStale(index)) { + if (updateAgeOnHas) { + this.#updateItemAge(index); + } + if (status) { + status.has = 'hit'; + this.#statusTTL(status, index); + } + return true; + } + else if (status) { + status.has = 'stale'; + this.#statusTTL(status, index); + } + } + else if (status) { + status.has = 'miss'; + } + return false; + } + /** + * Like {@link LRUCache#get} but doesn't update recency or delete stale + * items. + * + * Returns `undefined` if the item is stale, unless + * {@link LRUCache.OptionsBase.allowStale} is set. + */ + peek(k, peekOptions = {}) { + const { allowStale = this.allowStale } = peekOptions; + const index = this.#keyMap.get(k); + if (index !== undefined && + (allowStale || !this.#isStale(index))) { + const v = this.#valList[index]; + // either stale and allowed, or forcing a refresh of non-stale value + return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + } + } + #backgroundFetch(k, index, options, context) { + const v = index === undefined ? undefined : this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + return v; + } + const ac = new AC(); + const { signal } = options; + // when/if our AC signals, then stop listening to theirs. 
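// Passing { signal: ac.signal } means the listener is removed
// automatically once the internal controller aborts, so a long-lived
// caller-supplied signal does not accumulate listeners across fetches.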
+ signal?.addEventListener('abort', () => ac.abort(signal.reason), { + signal: ac.signal, + }); + const fetchOpts = { + signal: ac.signal, + options, + context, + }; + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal; + const ignoreAbort = options.ignoreFetchAbort && v !== undefined; + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true; + options.status.fetchError = ac.signal.reason; + if (ignoreAbort) + options.status.fetchAbortIgnored = true; + } + else { + options.status.fetchResolved = true; + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason); + } + // either we didn't abort, and are still here, or we did, and ignored + const bf = p; + if (this.#valList[index] === p) { + if (v === undefined) { + if (bf.__staleWhileFetching) { + this.#valList[index] = bf.__staleWhileFetching; + } + else { + this.delete(k); + } + } + else { + if (options.status) + options.status.fetchUpdated = true; + this.set(k, v, fetchOpts.options); + } + } + return v; + }; + const eb = (er) => { + if (options.status) { + options.status.fetchRejected = true; + options.status.fetchError = er; + } + return fetchFail(er); + }; + const fetchFail = (er) => { + const { aborted } = ac.signal; + const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; + const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; + const noDelete = allowStale || options.noDeleteOnFetchRejection; + const bf = p; + if (this.#valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || bf.__staleWhileFetching === undefined; + if (del) { + this.delete(k); + } + else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.#valList[index] = bf.__staleWhileFetching; + } + } + if (allowStale) { + if (options.status && bf.__staleWhileFetching !== undefined) { + options.status.returnedStale = true; + } + return bf.__staleWhileFetching; + } + else if (bf.__returned === bf) { + throw er; + } + }; + const pcall = (res, rej) => { + const fmp = this.#fetchMethod?.(k, v, fetchOpts); + if (fmp && fmp instanceof Promise) { + fmp.then(v => res(v === undefined ? undefined : v), rej); + } + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if (!options.ignoreFetchAbort || + options.allowStaleOnFetchAbort) { + res(undefined); + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true); + } + } + }); + }; + if (options.status) + options.status.fetchDispatched = true; + const p = new Promise(pcall).then(cb, eb); + const bf = Object.assign(p, { + __abortController: ac, + __staleWhileFetching: v, + __returned: undefined, + }); + if (index === undefined) { + // internal, don't expose status. 
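// The decorated promise is stored as the cache value itself:
// __staleWhileFetching carries the prior value (if any), while
// __abortController and __returned let #isBackgroundFetch() and fetch()
// recognize and manage the in-flight entry.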
+ this.set(k, bf, { ...fetchOpts.options, status: undefined }); + index = this.#keyMap.get(k); + } + else { + this.#valList[index] = bf; + } + return bf; + } + #isBackgroundFetch(p) { + if (!this.#hasFetchMethod) + return false; + const b = p; + return (!!b && + b instanceof Promise && + b.hasOwnProperty('__staleWhileFetching') && + b.__abortController instanceof AC); + } + async fetch(k, fetchOptions = {}) { + const { + // get options + allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; + if (!this.#hasFetchMethod) { + if (status) + status.fetch = 'get'; + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }); + } + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + }; + let index = this.#keyMap.get(k); + if (index === undefined) { + if (status) + status.fetch = 'miss'; + const p = this.#backgroundFetch(k, index, options, context); + return (p.__returned = p); + } + else { + // in cache, maybe already fetching + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + const stale = allowStale && v.__staleWhileFetching !== undefined; + if (status) { + status.fetch = 'inflight'; + if (stale) + status.returnedStale = true; + } + return stale ? v.__staleWhileFetching : (v.__returned = v); + } + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.#isStale(index); + if (!forceRefresh && !isStale) { + if (status) + status.fetch = 'hit'; + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + if (status) + this.#statusTTL(status, index); + return v; + } + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.#backgroundFetch(k, index, options, context); + const hasStale = p.__staleWhileFetching !== undefined; + const staleVal = hasStale && allowStale; + if (status) { + status.fetch = isStale ? 'stale' : 'refresh'; + if (staleVal && isStale) + status.returnedStale = true; + } + return staleVal ? p.__staleWhileFetching : (p.__returned = p); + } + } + /** + * Return a value from the cache. Will update the recency of the cache + * entry found. + * + * If the key is not found, get() will return `undefined`. 
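A sketch of get() with a status tracker (the status fields shown are the ones set by the code above; timings illustrative):

    const c = new LRUCache({ max: 10, ttl: 1000, allowStale: true })
    c.set('k', 'v')
    const status = {}
    c.get('k', { status })    // => 'v'; status.get === 'hit'
    c.get('nope', { status }) // => undefined; status.get === 'miss'
    // once the 1s TTL has elapsed, with allowStale set:
    // c.get('k', { status }) // => 'v'; status.get === 'stale',
    //                        //    status.returnedStale === true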
+ */ + get(k, getOptions = {}) { + const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const value = this.#valList[index]; + const fetching = this.#isBackgroundFetch(value); + if (status) + this.#statusTTL(status, index); + if (this.#isStale(index)) { + if (status) + status.get = 'stale'; + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k); + } + if (status && allowStale) + status.returnedStale = true; + return allowStale ? value : undefined; + } + else { + if (status && + allowStale && + value.__staleWhileFetching !== undefined) { + status.returnedStale = true; + } + return allowStale ? value.__staleWhileFetching : undefined; + } + } + else { + if (status) + status.get = 'hit'; + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching; + } + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + return value; + } + } + else if (status) { + status.get = 'miss'; + } + } + #connect(p, n) { + this.#prev[n] = p; + this.#next[p] = n; + } + #moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.#tail) { + if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#connect(this.#prev[index], this.#next[index]); + } + this.#connect(this.#tail, index); + this.#tail = index; + } + } + /** + * Deletes a key out of the cache. + * Returns true if the key was deleted, false otherwise. + */ + delete(k) { + let deleted = false; + if (this.#size !== 0) { + const index = this.#keyMap.get(k); + if (index !== undefined) { + deleted = true; + if (this.#size === 1) { + this.clear(); + } + else { + this.#removeItemSize(index); + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + this.#keyMap.delete(k); + this.#keyList[index] = undefined; + this.#valList[index] = undefined; + if (index === this.#tail) { + this.#tail = this.#prev[index]; + } + else if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#next[this.#prev[index]] = this.#next[index]; + this.#prev[this.#next[index]] = this.#prev[index]; + } + this.#size--; + this.#free.push(index); + } + } + } + if (this.#hasDisposeAfter && this.#disposed?.length) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return deleted; + } + /** + * Clear the cache entirely, throwing away all values. 
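A sketch of clear() with a dispose handler (handler body illustrative):

    const c = new LRUCache({
      max: 5,
      dispose: (value, key, reason) => console.log(reason, key, value),
    })
    c.set('a', 1).set('b', 2)
    c.clear() // dispose runs per entry: logs "delete a 1", "delete b 2"
    console.log(c.size) // 0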
+ */ + clear() { + for (const index of this.#rindexes({ allowStale: true })) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else { + const k = this.#keyList[index]; + if (this.#hasDispose) { + this.#dispose?.(v, k, 'delete'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'delete']); + } + } + } + this.#keyMap.clear(); + this.#valList.fill(undefined); + this.#keyList.fill(undefined); + if (this.#ttls && this.#starts) { + this.#ttls.fill(0); + this.#starts.fill(0); + } + if (this.#sizes) { + this.#sizes.fill(0); + } + this.#head = 0; + this.#tail = 0; + this.#free.length = 0; + this.#calculatedSize = 0; + this.#size = 0; + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.min.js b/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.min.js new file mode 100644 index 0000000000000..5a16b3940d6df --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.min.js @@ -0,0 +1,2 @@ +var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var R=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=C.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,R))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#W=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#R(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#W=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#R(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#W(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#W(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#R(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#R(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};export{R as LRUCache}; +//# sourceMappingURL=index.min.js.map diff --git a/node_modules/cacache/node_modules/lru-cache/dist/mjs/package.json b/node_modules/cacache/node_modules/lru-cache/dist/mjs/package.json new file mode 100644 index 0000000000000..3dbc1ca591c05 --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/cacache/node_modules/lru-cache/package.json b/node_modules/cacache/node_modules/lru-cache/package.json new file mode 100644 index 0000000000000..bae4a04839d1f --- /dev/null +++ b/node_modules/cacache/node_modules/lru-cache/package.json @@ -0,0 +1,108 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "10.0.1", + "author": "Isaac Z. Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "sideEffects": false, + "scripts": { + "build": "npm run prepare", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", + "postprepare": "bash fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write .", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", + "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh", + "prebenchmark": "npm run prepare", + "benchmark": "make -C benchmark", + "preprofile": "npm run prepare", + "profile": "make -C benchmark profile" + }, + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", + "exports": { + "./min": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.min.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.min.js" + } + }, + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + } + }, + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "@size-limit/preset-small-lib": "^7.0.8", + "@types/node": "^20.2.5", + "@types/tap": "^15.0.6", + "benchmark": "^2.1.4", + "c8": "^7.11.2", + "clock-mock": "^1.0.6", + "esbuild": "^0.17.11", + "eslint-config-prettier": "^8.5.0", + "marked": "^4.2.12", + "mkdirp": "^2.1.5", + "prettier": "^2.6.2", + "size-limit": "^7.0.8", + "tap": "^16.3.4", + "ts-node": "^10.9.1", + "tslib": "^2.4.0", + "typedoc": "^0.24.6", + "typescript": "^5.0.4" + }, + "license": 
"ISC", + "files": [ + "dist" + ], + "engines": { + "node": "14 || >=16.14" + }, + "prettier": { + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "tap": { + "coverage": false, + "node-arg": [ + "--expose-gc", + "-r", + "ts-node/register" + ], + "ts": false + }, + "size-limit": [ + { + "path": "./dist/mjs/index.js" + } + ] +} diff --git a/node_modules/cacache/package.json b/node_modules/cacache/package.json index ab58cb8b7c50f..1b14bf4bd1490 100644 --- a/node_modules/cacache/package.json +++ b/node_modules/cacache/package.json @@ -1,6 +1,6 @@ { "name": "cacache", - "version": "17.1.4", + "version": "18.0.0", "cache-version": { "content": "2", "index": "5" @@ -48,7 +48,7 @@ "@npmcli/fs": "^3.1.0", "fs-minipass": "^3.0.0", "glob": "^10.2.2", - "lru-cache": "^7.7.1", + "lru-cache": "^10.0.1", "minipass": "^7.0.3", "minipass-collect": "^1.0.2", "minipass-flush": "^1.0.5", @@ -64,13 +64,19 @@ "tap": "^16.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^16.14.0 || >=18.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "windowsCI": false, "version": "4.18.0", - "publish": "true" + "publish": "true", + "ciVersions": [ + "16.14.0", + "16.x", + "18.0.0", + "18.x" + ] }, "author": "GitHub Inc.", "tap": { diff --git a/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md b/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md new file mode 100644 index 0000000000000..8d28acf866d93 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js new file mode 100644 index 0000000000000..ad5a76a4f73f2 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js new file mode 100644 index 0000000000000..f41b539df65dc --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js @@ -0,0 +1,166 @@ +'use strict' + +const fs = require('fs/promises') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +async function read (cache, integrity, opts = {}) { + const { size } = opts + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = await fs.stat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && stat.size !== size) { + throw sizeError(size, stat.size) + } + + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } + + const data = await fs.readFile(cpath, { encoding: null }) + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } + + return data +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // just stat to ensure it exists + const stat = await fs.stat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && size !== stat.size) { + return stream.emit('error', sizeError(size, stat.size)) + } + + return readPipeline(cpath, stat.size, sri, stream) + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.copy = copy + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath, sri) => { + return fs.copyFile(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + +async function hasContent (cache, integrity) { + if 
(!integrity) { + return false + } + + try { + return await withContentSri(cache, integrity, async (cpath, sri) => { + const stat = await fs.stat(cpath) + return { size: stat.size, sri, stat } + }) + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } +} + +async function withContentSri (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. + const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + const results = await Promise.all(digests.map(async (meta) => { + try { + return await withContentSri(cache, meta, fn) + } catch (err) { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + } + })) + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result + } + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError + } + + // Throw generic error + throw results.find((r) => r instanceof Error) + } +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js new file mode 100644 index 0000000000000..ce58d679e4cb2 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,18 @@ +'use strict' + +const fs = require('fs/promises') +const contentPath = require('./path') +const { hasContent } = require('./read') + +module.exports = rm + +async function rm (cache, integrity) { + const content = await hasContent(cache, integrity) + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) + return true + } else { + return false + } +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js new file mode 100644 index 0000000000000..7146146581287 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js @@ -0,0 +1,205 @@ +'use strict' + +const events = require('events') + +const contentPath = require('./path') +const fs = require('fs/promises') +const { moveFile } = require('@npmcli/fs') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') +const Flush = require('minipass-flush') +const path = require('path') +const 
ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const fsm = require('fs-minipass') + +module.exports = write + +// Cache of move operations in process so we don't duplicate +const moveOperations = new Map() + +async function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + + if (typeof size === 'number' && data.length !== size) { + throw sizeError(size, data.length) + } + + const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) { + throw checksumError(integrity, sri) + } + + for (const algo in sri) { + const tmp = await makeTmp(cache, opts) + const hash = sri[algo].toString() + try { + await fs.writeFile(tmp.target, data, { flag: 'wx' }) + await moveToDestination(tmp, cache, hash, opts) + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } + } + return { integrity: sri, size: data.length } +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. +class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. 
+ return Promise.reject(e).catch(cb) + } + // eslint-disable-next-line promise/catch-or-return + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + // eslint-disable-next-line promise/always-return + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +async function handleContent (inputStream, cache, opts) { + const tmp = await makeTmp(cache, opts) + try { + const res = await pipeToTmp(inputStream, cache, tmp.target, opts) + await moveToDestination( + tmp, + cache, + res.integrity, + opts + ) + return res + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } +} + +async function pipeToTmp (inputStream, cache, tmpTarget, opts) { + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + if (opts.integrityEmitter) { + // we need to create these all simultaneously since they can fire in any order + const [integrity, size] = await Promise.all([ + events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), + events.once(opts.integrityEmitter, 'size').then(res => res[0]), + new Pipeline(inputStream, outStream).promise(), + ]) + return { integrity, size } + } + + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const pipeline = new Pipeline(inputStream, hashStream, outStream) + await pipeline.promise() + return { integrity, size } +} + +async function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) + return { + target: tmpTarget, + moved: false, + } +} + +async function moveToDestination (tmp, cache, sri, opts) { + const destination = contentPath(cache, sri) + const destDir = path.dirname(destination) + if (moveOperations.has(destination)) { + return moveOperations.get(destination) + } + moveOperations.set( + destination, + fs.mkdir(destDir, { recursive: true }) + .then(async () => { + await moveFile(tmp.target, destination, { overwrite: false }) + tmp.moved = true + return tmp.moved + }) + .catch(err => { + if (!err.message.startsWith('The destination file exists')) { + throw Object.assign(err, { code: 'EEXIST' }) + } + }).finally(() => { + moveOperations.delete(destination) + }) + + ) + return moveOperations.get(destination) +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js new file mode 100644 index 0000000000000..722a37af5ce15 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,330 @@ +'use strict' + +const crypto = require('crypto') +const { + appendFile, + 
mkdir, + readFile, + readdir, + rm, + writeFile, +} = require('fs/promises') +const { Minipass } = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const contentPath = require('./content/path') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const { moveFile } = require('@npmcli/fs') + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. + // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. + if (entry.integrity === null && !opts.validateEntry) { + break + } + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { + newEntries.unshift(entry) + } + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await mkdir(path.dirname(target), { recursive: true }) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) { + return rm(tmp.target, { recursive: true, force: true }) + } + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await mkdir(path.dirname(bucket), { recursive: true }) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + } + + // write the file atomically + const tmp = await setup() + try { + await write(tmp) + } finally { + await teardown(tmp) + } + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +async function insert (cache, key, integrity, opts = {}) { + const { metadata, size, time } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + 
integrity: integrity && ssri.stringify(integrity), + time: time || Date.now(), + size, + metadata, + } + try { + await mkdir(path.dirname(bucket), { recursive: true }) + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. + await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + + throw err + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +async function find (cache, key) { + const bucket = bucketPath(cache, key) + try { + const entries = await bucketEntries(bucket) + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) { + return insert(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rm(bucket, { recursive: true, force: true }) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const buckets = await readdirOrEmpty(indexDir) + await Promise.all(buckets.map(async (bucket) => { + const bucketPath = path.join(indexDir, bucket) + const subbuckets = await readdirOrEmpty(bucketPath) + await Promise.all(subbuckets.map(async (subbucket) => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename//./*" + const subbucketEntries = await readdirOrEmpty(subbucketPath) + await Promise.all(subbucketEntries.map(async (entry) => { + const entryPath = path.join(subbucketPath, entry) + try { + const entries = await bucketEntries(entryPath) + // using a Map here prevents duplicate keys from showing up + // twice, I guess? + const reduced = entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + throw err + } + })) + })) + })) + stream.end() + return stream + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.ls = ls + +async function ls (cache) { + const entries = await lsStream(cache).collect() + return entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) +} + +module.exports.bucketEntries = bucketEntries + +async function bucketEntries (bucket, filter) { + const data = await readFile(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data, filter) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) { + return + } + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter! 
+ // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (_) { + // eslint-ignore-next-line no-empty-block + } + // coverage disabled here, no need to test with an entry that parses to something falsey + // istanbul ignore else + if (obj) { + entries.push(obj) + } + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) { + return null + } + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return [] + } + + throw err + }) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js new file mode 100644 index 0000000000000..80ec206c7ecaa --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js @@ -0,0 +1,170 @@ +'use strict' + +const Collect = require('minipass-collect') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') + +const index = require('./entry-index') +const memo = require('./memoization') +const read = require('./content/read') + +async function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = await read(cache, entry.integrity, { integrity, size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} +module.exports = getData + +async function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) { + return memoized + } + + const res = await read(cache, key, { integrity, size }) + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res +} +module.exports.byDigest = getDataByDigest + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 'integrity' && cb(memoized.entry.integrity) + ev === 'size' && cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function getStream (cache, key, opts = {}) 
{ + const { memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return getMemoizedStream(memoized) + } + + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const entry = await index.find(cache, key) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + return stream + }).catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) { + return stream + } + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized.entry) + } else { + return index.find(cache, key) + } +} +module.exports.info = info + +async function copy (cache, key, dest, opts = {}) { + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + await read.copy(cache, entry.integrity, dest, opts) + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} + +module.exports.copy = copy + +async function copyByDigest (cache, key, dest, opts = {}) { + await read.copy(cache, key, dest, opts) + return key +} + +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js new file mode 100644 index 0000000000000..c9b0da5f3a271 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js @@ -0,0 +1,42 @@ +'use strict' + +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = require('./memoization.js') +const tmp = require('./util/tmp.js') +const index = require('./entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = index.ls +module.exports.ls.stream = index.lsStream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.stream = get.stream +module.exports.get.stream.byDigest = 
get.stream.byDigest +module.exports.get.copy = get.copy +module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = get.info +module.exports.get.hasContent = get.hasContent + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js new file mode 100644 index 0000000000000..0ff604a479c9c --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js @@ -0,0 +1,72 @@ +'use strict' + +const LRU = require('lru-cache') + +const MEMOIZED = new LRU({ + max: 500, + maxSize: 50 * 1024 * 1024, // 50MB + ttl: 3 * 60 * 1000, // 3 minutes + sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.clear() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) { + return MEMOIZED + } else if (opts.memoize.get && opts.memoize.set) { + return opts.memoize + } else if (typeof opts.memoize === 'object') { + return new ObjProxy(opts.memoize) + } else { + return MEMOIZED + } +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js new file mode 100644 index 0000000000000..9fc932d5f6dec --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js @@ -0,0 +1,80 @@ +'use strict' + +const index = require('./entry-index') +const memo = require('./memoization') +const write = require('./content/write') +const Flush = require('minipass-flush') +const { PassThrough } = require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) + +module.exports = putData + +async function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + const res = await write(cache, data, opts) + const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return res.integrity +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = {}) { 
+ const { memoize } = opts + opts = putOpts(opts) + let integrity + let size + let error + + let memoData + const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. + if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. + const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + .on('error', (err) => { + error = err + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + async flush () { + if (!error) { + const entry = await index.insert(cache, key, integrity, { ...opts, size }) + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + pipeline.emit('integrity', integrity) + pipeline.emit('size', size) + } + }, + })) + + return pipeline +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js new file mode 100644 index 0000000000000..a94760c7cf243 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const { rm } = require('fs/promises') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const memo = require('./memoization') +const path = require('path') +const rmContent = require('./content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +async function all (cache) { + memo.clearMemoized() + const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) + return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/glob.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/glob.js new file mode 100644 index 0000000000000..8500c1c16a429 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/glob.js @@ -0,0 +1,7 @@ +'use strict' + +const { glob } = require('glob') +const path = require('path') + +const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) +module.exports = (path, options) => glob(globify(path), options) diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 0000000000000..445599b503808 --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 0000000000000..0bf5302136ebe --- /dev/null +++ 
b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js @@ -0,0 +1,26 @@ +'use strict' + +const { withTempDir } = require('@npmcli/fs') +const fs = require('fs/promises') +const path = require('path') + +module.exports.mkdir = mktmpdir + +async function mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return withTempDir(path.join(cache, 'tmp'), cb, opts) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js new file mode 100644 index 0000000000000..62e85c946490f --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js @@ -0,0 +1,257 @@ +'use strict' + +const { + mkdir, + readFile, + rm, + stat, + truncate, + writeFile, +} = require('fs/promises') +const pMap = require('p-map') +const contentPath = require('./content/path') +const fsm = require('fs-minipass') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const path = require('path') +const ssri = require('ssri') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +async function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + const stats = {} + for (const step of steps) { + const label = step.name + const start = new Date() + const s = await step(cache, opts) + if (s) { + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + } + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + stats.runTime[label] = end - start + } + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats +} + +async function markStartTime (cache, opts) { + return { startTime: new Date() } +} + +async function markEndTime (cache, opts) { + return { endTime: new Date() } +} + +async function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache permissions') + await mkdir(cache, { recursive: true }) + return null +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rm it. 
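// The sweep described in the steps above reports its work through the
// stats object that the public verify() entry point resolves to. A brief
// usage sketch; the cache path here is an assumption for illustration:
const cacache = require('cacache')

cacache.verify('/tmp/example-cache').then((stats) => {
  // verifiedContent: live entries whose checksums passed
  // reclaimedCount / reclaimedSize: orphaned or corrupt content removed
  console.log(`kept ${stats.verifiedContent}, freed ${stats.reclaimedSize} bytes`)
})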
+// +async function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) { + return + } + + // integrity is stringified, re-parse it so we can get each hash + const integrity = ssri.parse(entry.integrity) + for (const algo in integrity) { + liveContent.add(integrity[algo].toString()) + } + }) + await new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }) + const contentDir = contentPath.contentDir(cache) + const files = await glob(path.join(contentDir, '**'), { + follow: false, + nodir: true, + nosort: true, + }) + const stats = { + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + } + await pMap( + files, + async (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + const info = await verifyContent(f, integrity) + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + } else { + // No entries refer to this content. We can delete. + stats.reclaimedCount++ + const s = await stat(f) + await rm(f, { recursive: true, force: true }) + stats.reclaimedSize += s.size + } + return stats + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function verifyContent (filepath, sri) { + const contentInfo = {} + try { + const { size } = await stat(filepath) + contentInfo.size = size + contentInfo.valid = true + await ssri.checkStream(new fsm.ReadStream(filepath), sri) + } catch (err) { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + if (err.code !== 'EINTEGRITY') { + throw err + } + + await rm(filepath, { recursive: true, force: true }) + contentInfo.valid = false + } + return contentInfo +} + +async function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + const entries = await index.ls(cache) + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + await pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function rebuildBucket (cache, bucket, stats, opts) { + await truncate(bucket._path) + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. 
+ for (const entry of bucket) { + const content = contentPath(cache, entry.integrity) + try { + await stat(content) + await index.insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + time: entry.time, + }) + stats.totalEntries++ + } catch (err) { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + } else { + throw err + } + } + } +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) +} + +async function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + return writeFile(verifile, `${Date.now()}`) +} + +module.exports.lastRun = lastRun + +async function lastRun (cache) { + const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) + return new Date(+data) +} diff --git a/node_modules/make-fetch-happen/node_modules/cacache/package.json b/node_modules/make-fetch-happen/node_modules/cacache/package.json new file mode 100644 index 0000000000000..ab58cb8b7c50f --- /dev/null +++ b/node_modules/make-fetch-happen/node_modules/cacache/package.json @@ -0,0 +1,82 @@ +{ + "name": "cacache", + "version": "17.1.4", + "cache-version": { + "content": "2", + "index": "5" + }, + "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "coverage": "tap", + "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", + "lint": "eslint \"**/*.js\"", + "npmclilint": "npmcli-lint", + "lintfix": "npm run lint -- --fix", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/cacache.git" + }, + "keywords": [ + "cache", + "caching", + "content-addressable", + "sri", + "sri hash", + "subresource integrity", + "cache", + "storage", + "store", + "file store", + "filesystem", + "disk cache", + "disk storage" + ], + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^7.7.1", + "minipass": "^7.0.3", + "minipass-collect": "^1.0.2", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.18.0", + "tap": "^16.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "windowsCI": false, + "version": "4.18.0", + "publish": "true" + }, + "author": "GitHub Inc.", + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/node_modules/node-gyp/node_modules/cacache/LICENSE.md b/node_modules/node-gyp/node_modules/cacache/LICENSE.md new file mode 100644 index 0000000000000..8d28acf866d93 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. 
+ +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/path.js b/node_modules/node-gyp/node_modules/cacache/lib/content/path.js new file mode 100644 index 0000000000000..ad5a76a4f73f2 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/read.js b/node_modules/node-gyp/node_modules/cacache/lib/content/read.js new file mode 100644 index 0000000000000..f41b539df65dc --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/content/read.js @@ -0,0 +1,166 @@ +'use strict' + +const fs = require('fs/promises') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +async function read (cache, integrity, opts = {}) { + const { size } = opts + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = await fs.stat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && stat.size !== size) { + throw sizeError(size, stat.size) + } + + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } + + const data = await fs.readFile(cpath, { encoding: null }) + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } + + return data +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const { stat, cpath, sri } = await 
withContentSri(cache, integrity, async (cpath, sri) => { + // just stat to ensure it exists + const stat = await fs.stat(cpath) + return { stat, cpath, sri } + }) + if (typeof size === 'number' && size !== stat.size) { + return stream.emit('error', sizeError(size, stat.size)) + } + + return readPipeline(cpath, stat.size, sri, stream) + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.copy = copy + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath, sri) => { + return fs.copyFile(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + +async function hasContent (cache, integrity) { + if (!integrity) { + return false + } + + try { + return await withContentSri(cache, integrity, async (cpath, sri) => { + const stat = await fs.stat(cpath) + return { size: stat.size, sri, stat } + }) + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } +} + +async function withContentSri (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. + const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + const results = await Promise.all(digests.map(async (meta) => { + try { + return await withContentSri(cache, meta, fn) + } catch (err) { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + } + })) + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result + } + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError + } + + // Throw generic error + throw results.find((r) => r instanceof Error) + } +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js b/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js new file mode 100644 index 0000000000000..ce58d679e4cb2 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,18 @@ +'use strict' + +const fs = require('fs/promises') +const contentPath = require('./path') +const { hasContent } = require('./read') + +module.exports = rm + +async function rm (cache, integrity) { + const content = await hasContent(cache, integrity) + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) + return true + } else { + return false + } +} diff --git 
a/node_modules/node-gyp/node_modules/cacache/lib/content/write.js b/node_modules/node-gyp/node_modules/cacache/lib/content/write.js new file mode 100644 index 0000000000000..7146146581287 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/content/write.js @@ -0,0 +1,205 @@ +'use strict' + +const events = require('events') + +const contentPath = require('./path') +const fs = require('fs/promises') +const { moveFile } = require('@npmcli/fs') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') +const Flush = require('minipass-flush') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const fsm = require('fs-minipass') + +module.exports = write + +// Cache of move operations in process so we don't duplicate +const moveOperations = new Map() + +async function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + + if (typeof size === 'number' && data.length !== size) { + throw sizeError(size, data.length) + } + + const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) { + throw checksumError(integrity, sri) + } + + for (const algo in sri) { + const tmp = await makeTmp(cache, opts) + const hash = sri[algo].toString() + try { + await fs.writeFile(tmp.target, data, { flag: 'wx' }) + await moveToDestination(tmp, cache, hash, opts) + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } + } + return { integrity: sri, size: data.length } +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. +class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. 
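+      // (Rejecting an already-settled promise runs cb(e) as a microtask + // instead of synchronously inside end(), so 'error' listeners attached + // after the fact still get a chance to register.)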
+ return Promise.reject(e).catch(cb) + } + // eslint-disable-next-line promise/catch-or-return + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + // eslint-disable-next-line promise/always-return + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +async function handleContent (inputStream, cache, opts) { + const tmp = await makeTmp(cache, opts) + try { + const res = await pipeToTmp(inputStream, cache, tmp.target, opts) + await moveToDestination( + tmp, + cache, + res.integrity, + opts + ) + return res + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } +} + +async function pipeToTmp (inputStream, cache, tmpTarget, opts) { + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + if (opts.integrityEmitter) { + // we need to create these all simultaneously since they can fire in any order + const [integrity, size] = await Promise.all([ + events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), + events.once(opts.integrityEmitter, 'size').then(res => res[0]), + new Pipeline(inputStream, outStream).promise(), + ]) + return { integrity, size } + } + + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const pipeline = new Pipeline(inputStream, hashStream, outStream) + await pipeline.promise() + return { integrity, size } +} + +async function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) + return { + target: tmpTarget, + moved: false, + } +} + +async function moveToDestination (tmp, cache, sri, opts) { + const destination = contentPath(cache, sri) + const destDir = path.dirname(destination) + if (moveOperations.has(destination)) { + return moveOperations.get(destination) + } + moveOperations.set( + destination, + fs.mkdir(destDir, { recursive: true }) + .then(async () => { + await moveFile(tmp.target, destination, { overwrite: false }) + tmp.moved = true + return tmp.moved + }) + .catch(err => { + if (!err.message.startsWith('The destination file exists')) { + throw Object.assign(err, { code: 'EEXIST' }) + } + }).finally(() => { + moveOperations.delete(destination) + }) + + ) + return moveOperations.get(destination) +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js b/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js new file mode 100644 index 0000000000000..722a37af5ce15 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,330 @@ +'use strict' + +const crypto = require('crypto') +const { + appendFile, + mkdir, + readFile, + readdir, 
+ rm, + writeFile, +} = require('fs/promises') +const { Minipass } = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const contentPath = require('./content/path') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const { moveFile } = require('@npmcli/fs') + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. + // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. + if (entry.integrity === null && !opts.validateEntry) { + break + } + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { + newEntries.unshift(entry) + } + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await mkdir(path.dirname(target), { recursive: true }) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) { + return rm(tmp.target, { recursive: true, force: true }) + } + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await mkdir(path.dirname(bucket), { recursive: true }) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + } + + // write the file atomically + const tmp = await setup() + try { + await write(tmp) + } finally { + await teardown(tmp) + } + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +async function insert (cache, key, integrity, opts = {}) { + const { metadata, size, time } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && 
ssri.stringify(integrity), + time: time || Date.now(), + size, + metadata, + } + try { + await mkdir(path.dirname(bucket), { recursive: true }) + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. + await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + + throw err + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +async function find (cache, key) { + const bucket = bucketPath(cache, key) + try { + const entries = await bucketEntries(bucket) + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) { + return insert(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rm(bucket, { recursive: true, force: true }) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const buckets = await readdirOrEmpty(indexDir) + await Promise.all(buckets.map(async (bucket) => { + const bucketPath = path.join(indexDir, bucket) + const subbuckets = await readdirOrEmpty(bucketPath) + await Promise.all(subbuckets.map(async (subbucket) => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename/<bucket 0xFF>/<bucket 0xFF>./*" + const subbucketEntries = await readdirOrEmpty(subbucketPath) + await Promise.all(subbucketEntries.map(async (entry) => { + const entryPath = path.join(subbucketPath, entry) + try { + const entries = await bucketEntries(entryPath) + // using a Map here prevents duplicate keys from showing up + // twice, I guess? + const reduced = entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + throw err + } + })) + })) + })) + stream.end() + return stream + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.ls = ls + +async function ls (cache) { + const entries = await lsStream(cache).collect() + return entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) +} + +module.exports.bucketEntries = bucketEntries + +async function bucketEntries (bucket, filter) { + const data = await readFile(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data, filter) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) { + return + } + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter! 
+ // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (_) { + // eslint-ignore-next-line no-empty-block + } + // coverage disabled here, no need to test with an entry that parses to something falsey + // istanbul ignore else + if (obj) { + entries.push(obj) + } + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) { + return null + } + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return [] + } + + throw err + }) +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/get.js b/node_modules/node-gyp/node_modules/cacache/lib/get.js new file mode 100644 index 0000000000000..80ec206c7ecaa --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/get.js @@ -0,0 +1,170 @@ +'use strict' + +const Collect = require('minipass-collect') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') + +const index = require('./entry-index') +const memo = require('./memoization') +const read = require('./content/read') + +async function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = await read(cache, entry.integrity, { integrity, size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} +module.exports = getData + +async function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) { + return memoized + } + + const res = await read(cache, key, { integrity, size }) + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res +} +module.exports.byDigest = getDataByDigest + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 'integrity' && cb(memoized.entry.integrity) + ev === 'size' && cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function getStream (cache, key, opts = {}) { + const { memoize, size 
} = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return getMemoizedStream(memoized) + } + + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const entry = await index.find(cache, key) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + return stream + }).catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) { + return stream + } + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized.entry) + } else { + return index.find(cache, key) + } +} +module.exports.info = info + +async function copy (cache, key, dest, opts = {}) { + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + await read.copy(cache, entry.integrity, dest, opts) + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} + +module.exports.copy = copy + +async function copyByDigest (cache, key, dest, opts = {}) { + await read.copy(cache, key, dest, opts) + return key +} + +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/node_modules/node-gyp/node_modules/cacache/lib/index.js b/node_modules/node-gyp/node_modules/cacache/lib/index.js new file mode 100644 index 0000000000000..c9b0da5f3a271 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/index.js @@ -0,0 +1,42 @@ +'use strict' + +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = require('./memoization.js') +const tmp = require('./util/tmp.js') +const index = require('./entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = index.ls +module.exports.ls.stream = index.lsStream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.stream = get.stream +module.exports.get.stream.byDigest = get.stream.byDigest +module.exports.get.copy = get.copy 
+module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = get.info +module.exports.get.hasContent = get.hasContent + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/node_modules/node-gyp/node_modules/cacache/lib/memoization.js b/node_modules/node-gyp/node_modules/cacache/lib/memoization.js new file mode 100644 index 0000000000000..0ff604a479c9c --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/memoization.js @@ -0,0 +1,72 @@ +'use strict' + +const LRU = require('lru-cache') + +const MEMOIZED = new LRU({ + max: 500, + maxSize: 50 * 1024 * 1024, // 50MB + ttl: 3 * 60 * 1000, // 3 minutes + sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.clear() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) { + return MEMOIZED + } else if (opts.memoize.get && opts.memoize.set) { + return opts.memoize + } else if (typeof opts.memoize === 'object') { + return new ObjProxy(opts.memoize) + } else { + return MEMOIZED + } +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/put.js b/node_modules/node-gyp/node_modules/cacache/lib/put.js new file mode 100644 index 0000000000000..9fc932d5f6dec --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/put.js @@ -0,0 +1,80 @@ +'use strict' + +const index = require('./entry-index') +const memo = require('./memoization') +const write = require('./content/write') +const Flush = require('minipass-flush') +const { PassThrough } = require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) + +module.exports = putData + +async function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + const res = await write(cache, data, opts) + const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return res.integrity +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + let integrity + let size + let error + + let memoData + 
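+  // An illustrative use of this stream API (the cache path and key here + // are hypothetical): + // + //   const cacache = require('cacache') + //   fs.createReadStream('./my-file.tgz') + //     .pipe(cacache.put.stream('/tmp/my-cache', 'my-key')) + //     .on('integrity', i => console.log('stored with integrity', i))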
const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. + if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. + const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + .on('error', (err) => { + error = err + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + async flush () { + if (!error) { + const entry = await index.insert(cache, key, integrity, { ...opts, size }) + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + pipeline.emit('integrity', integrity) + pipeline.emit('size', size) + } + }, + })) + + return pipeline +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/rm.js b/node_modules/node-gyp/node_modules/cacache/lib/rm.js new file mode 100644 index 0000000000000..a94760c7cf243 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const { rm } = require('fs/promises') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const memo = require('./memoization') +const path = require('path') +const rmContent = require('./content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +async function all (cache) { + memo.clearMemoized() + const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) + return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js b/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js new file mode 100644 index 0000000000000..8500c1c16a429 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js @@ -0,0 +1,7 @@ +'use strict' + +const { glob } = require('glob') +const path = require('path') + +const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) +module.exports = (path, options) => glob(globify(path), options) diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 0000000000000..445599b503808 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js b/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 0000000000000..0bf5302136ebe --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js @@ -0,0 +1,26 @@ +'use strict' + +const { withTempDir } = require('@npmcli/fs') +const fs = require('fs/promises') +const path = require('path') + +module.exports.mkdir = mktmpdir + +async function 
mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return withTempDir(path.join(cache, 'tmp'), cb, opts) +} diff --git a/node_modules/node-gyp/node_modules/cacache/lib/verify.js b/node_modules/node-gyp/node_modules/cacache/lib/verify.js new file mode 100644 index 0000000000000..62e85c946490f --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/lib/verify.js @@ -0,0 +1,257 @@ +'use strict' + +const { + mkdir, + readFile, + rm, + stat, + truncate, + writeFile, +} = require('fs/promises') +const pMap = require('p-map') +const contentPath = require('./content/path') +const fsm = require('fs-minipass') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const path = require('path') +const ssri = require('ssri') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +async function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + const stats = {} + for (const step of steps) { + const label = step.name + const start = new Date() + const s = await step(cache, opts) + if (s) { + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + } + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + stats.runTime[label] = end - start + } + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats +} + +async function markStartTime (cache, opts) { + return { startTime: new Date() } +} + +async function markEndTime (cache, opts) { + return { endTime: new Date() } +} + +async function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache permissions') + await mkdir(cache, { recursive: true }) + return null +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rm it. 
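+// Along the way it tallies stats (verifiedContent, reclaimedCount, +// reclaimedSize, badContentCount, keptSize), which are merged into the +// summary that verify() returns.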
+// +async function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) { + return + } + + // integrity is stringified, re-parse it so we can get each hash + const integrity = ssri.parse(entry.integrity) + for (const algo in integrity) { + liveContent.add(integrity[algo].toString()) + } + }) + await new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }) + const contentDir = contentPath.contentDir(cache) + const files = await glob(path.join(contentDir, '**'), { + follow: false, + nodir: true, + nosort: true, + }) + const stats = { + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + } + await pMap( + files, + async (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + const info = await verifyContent(f, integrity) + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + } else { + // No entries refer to this content. We can delete. + stats.reclaimedCount++ + const s = await stat(f) + await rm(f, { recursive: true, force: true }) + stats.reclaimedSize += s.size + } + return stats + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function verifyContent (filepath, sri) { + const contentInfo = {} + try { + const { size } = await stat(filepath) + contentInfo.size = size + contentInfo.valid = true + await ssri.checkStream(new fsm.ReadStream(filepath), sri) + } catch (err) { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + if (err.code !== 'EINTEGRITY') { + throw err + } + + await rm(filepath, { recursive: true, force: true }) + contentInfo.valid = false + } + return contentInfo +} + +async function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + const entries = await index.ls(cache) + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + await pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function rebuildBucket (cache, bucket, stats, opts) { + await truncate(bucket._path) + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. 
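+  // (Each insert() call appends to the same bucket file, so the entries + // are awaited one at a time to keep the appends from interleaving.)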
+ for (const entry of bucket) { + const content = contentPath(cache, entry.integrity) + try { + await stat(content) + await index.insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + time: entry.time, + }) + stats.totalEntries++ + } catch (err) { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + } else { + throw err + } + } + } +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) +} + +async function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + return writeFile(verifile, `${Date.now()}`) +} + +module.exports.lastRun = lastRun + +async function lastRun (cache) { + const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) + return new Date(+data) +} diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE b/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE new file mode 100644 index 0000000000000..de3226673c387 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js b/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js new file mode 100644 index 0000000000000..4af9ddee463f4 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/index.js @@ -0,0 +1,203 @@ +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? 
parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m) return [str]; + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? expand(m.post, false) + : ['']; + + if (/\$$/.test(m.pre)) { + for (var k = 0; k < post.length; k++) { + var expansion = pre+ '{' + m.body + '}' + post[k]; + expansions.push(expansion); + } + } else { + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? 
Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = []; + + for (var j = 0; j < n.length; j++) { + N.push.apply(N, expand(n[j], false)); + } + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + } + + return expansions; +} + diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json new file mode 100644 index 0000000000000..7097d41e39de5 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion/package.json @@ -0,0 +1,46 @@ +{ + "name": "brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "2.0.1", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "main": "index.js", + "scripts": { + "test": "tape test/*.js", + "gentest": "bash test/generate.sh", + "bench": "matcha test/perf/bench.js" + }, + "dependencies": { + "balanced-match": "^1.0.0" + }, + "devDependencies": { + "@c4312/matcha": "^1.3.1", + "tape": "^4.6.0" + }, + "keywords": [], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE new file mode 100644 index 0000000000000..ec7df93329abf --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2009-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
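For context, the brace-expansion module vendored above is what gives glob its '{a,b}' and '{1..3}' handling. A minimal sketch of its observable behavior (illustrative only, assuming it is required directly as 'brace-expansion'):

    const expand = require('brace-expansion')
    expand('file-{a,b}.txt')  // => ['file-a.txt', 'file-b.txt']
    expand('{1..3}')          // => ['1', '2', '3']
    expand('{01..03}')        // => ['01', '02', '03'] (zero-padding is preserved)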
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/package.json new file mode 100644 index 0000000000000..c15df94a3582b --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/package.json @@ -0,0 +1,4 @@ +{ + "version": "10.3.3", + "type": "commonjs" +} diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js new file mode 100755 index 0000000000000..4a8a88f2734d2 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/bin.js @@ -0,0 +1,270 @@ +#!/usr/bin/env node +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const foreground_child_1 = require("foreground-child"); +const fs_1 = require("fs"); +const jackspeak_1 = require("jackspeak"); +const package_json_1 = require("../package.json"); +const index_js_1 = require("./index.js"); +const j = (0, jackspeak_1.jack)({ + usage: 'glob [options] [<pattern> [<pattern> ...]]', +}) + .description(` + Glob v${package_json_1.version} + + Expand the positional glob expression arguments into any matching file + system paths found. + `) + .opt({ + cmd: { + short: 'c', + hint: 'command', + description: `Run the command provided, passing the glob expression + matches as arguments.`, + }, +}) + .opt({ + default: { + short: 'p', + hint: 'pattern', + description: `If no positional arguments are provided, glob will use + this pattern`, + }, +}) + .flag({ + all: { + short: 'A', + description: `By default, the glob cli command will not expand any + arguments that are an exact match to a file on disk. + + This prevents double-expanding, in case the shell expands + an argument whose filename is a glob expression. + + For example, if 'app/*.ts' would match 'app/[id].ts', then + on Windows powershell or cmd.exe, 'glob app/*.ts' will + expand to 'app/[id].ts', as expected. However, in posix + shells such as bash or zsh, the shell will first expand + 'app/*.ts' to a list of filenames. Then glob will look + for a file matching 'app/[id].ts' (ie, 'app/i.ts' or + 'app/d.ts'), which is unexpected. + + Setting '--all' prevents this behavior, causing glob + to treat ALL patterns as glob expressions to be expanded, + even if they are an exact match to a file on disk. + + When setting this option, be sure to enquote arguments + so that the shell will not expand them prior to passing + them to the glob command process. + `, + }, + absolute: { + short: 'a', + description: 'Expand to absolute paths', + }, + 'dot-relative': { + short: 'd', + description: `Prepend './' on relative matches`, + }, + mark: { + short: 'm', + description: `Append a / on any directories matched`, + }, + posix: { + short: 'x', + description: `Always resolve to posix style paths, using '/' as the + directory separator, even on Windows. Drive letter + absolute matches on Windows will be expanded to their + full resolved UNC paths, eg instead of 'C:\\foo\\bar', + it will expand to '//?/C:/foo/bar'. + `, + }, + follow: { + short: 'f', + description: `Follow symlinked directories when expanding '**'`, + }, + realpath: { + short: 'R', + description: `Call 'fs.realpath' on all of the results. In the case + of an entry that cannot be resolved, the entry is + omitted. 
This incurs a slight performance penalty, of + course, because of the added system calls.`, + }, + stat: { + short: 's', + description: `Call 'fs.lstat' on all entries, whether required or not + to determine if it's a valid match.`, + }, + 'match-base': { + short: 'b', + description: `Perform a basename-only match if the pattern does not + contain any slash characters. That is, '*.js' would be + treated as equivalent to '**/*.js', matching js files + in all directories. + `, + }, + dot: { + description: `Allow patterns to match files/directories that start + with '.', even if the pattern does not start with '.' + `, + }, + nobrace: { + description: 'Do not expand {...} patterns', + }, + nocase: { + description: `Perform a case-insensitive match. This defaults to + 'true' on macOS and Windows platforms, and false on + all others. + + Note: 'nocase' should only be explicitly set when it is + known that the filesystem's case sensitivity differs + from the platform default. If set 'true' on + case-insensitive file systems, then the walk may return + more or less results than expected. + `, + }, + nodir: { + description: `Do not match directories, only files. + + Note: to *only* match directories, append a '/' at the + end of the pattern. + `, + }, + noext: { + description: `Do not expand extglob patterns, such as '+(a|b)'`, + }, + noglobstar: { + description: `Do not expand '**' against multiple path portions. + Ie, treat it as a normal '*' instead.`, + }, + 'windows-path-no-escape': { + description: `Use '\\' as a path separator *only*, and *never* as an + escape character. If set, all '\\' characters are + replaced with '/' in the pattern.`, + }, +}) + .num({ + 'max-depth': { + short: 'D', + description: `Maximum depth to traverse from the current + working directory`, + }, +}) + .opt({ + cwd: { + short: 'C', + description: 'Current working directory to execute/match in', + default: process.cwd(), + }, + root: { + short: 'r', + description: `A string path resolved against the 'cwd', which is + used as the starting point for absolute patterns that + start with '/' (but not drive letters or UNC paths + on Windows). + + Note that this *doesn't* necessarily limit the walk to + the 'root' directory, and doesn't affect the cwd + starting point for non-absolute patterns. A pattern + containing '..' will still be able to traverse out of + the root directory, if it is not an actual root directory + on the filesystem, and any non-absolute patterns will + still be matched in the 'cwd'. + + To start absolute and non-absolute patterns in the same + path, you can use '--root=' to set it to the empty + string. However, be aware that on Windows systems, a + pattern like 'x:/*' or '//host/share/*' will *always* + start in the 'x:/' or '//host/share/' directory, + regardless of the --root setting. + `, + }, + platform: { + description: `Defaults to the value of 'process.platform' if + available, or 'linux' if not. 
Setting --platform=win32 + on non-Windows systems may cause strange behavior!`, + validate: v => new Set([ + 'aix', + 'android', + 'darwin', + 'freebsd', + 'haiku', + 'linux', + 'openbsd', + 'sunos', + 'win32', + 'cygwin', + 'netbsd', + ]).has(v), + }, +}) + .optList({ + ignore: { + short: 'i', + description: `Glob patterns to ignore`, + }, +}) + .flag({ + debug: { + short: 'v', + description: `Output a huge amount of noisy debug information about + patterns as they are parsed and used to match files.`, + }, +}) + .flag({ + help: { + short: 'h', + description: 'Show this usage information', + }, +}); +try { + const { positionals, values } = j.parse(); + if (values.help) { + console.log(j.usage()); + process.exit(0); + } + if (positionals.length === 0 && !values.default) + throw 'No patterns provided'; + if (positionals.length === 0 && values.default) + positionals.push(values.default); + const patterns = values.all + ? positionals + : positionals.filter(p => !(0, fs_1.existsSync)(p)); + const matches = values.all ? [] : positionals.filter(p => (0, fs_1.existsSync)(p)); + const stream = (0, index_js_1.globStream)(patterns, { + absolute: values.absolute, + cwd: values.cwd, + dot: values.dot, + dotRelative: values['dot-relative'], + follow: values.follow, + ignore: values.ignore, + mark: values.mark, + matchBase: values['match-base'], + maxDepth: values['max-depth'], + nobrace: values.nobrace, + nocase: values.nocase, + nodir: values.nodir, + noext: values.noext, + noglobstar: values.noglobstar, + platform: values.platform, + realpath: values.realpath, + root: values.root, + stat: values.stat, + debug: values.debug, + posix: values.posix, + }); + const cmd = values.cmd; + if (!cmd) { + matches.forEach(m => console.log(m)); + stream.on('data', f => console.log(f)); + } + else { + stream.on('data', f => matches.push(f)); + stream.on('end', () => (0, foreground_child_1.foregroundChild)(cmd, matches, { shell: true })); + } +} +catch (e) { + console.error(j.usage()); + console.error(e instanceof Error ? e.message : String(e)); + process.exit(1); +} +//# sourceMappingURL=bin.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js new file mode 100644 index 0000000000000..eb37c6b9a6601 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/glob.js @@ -0,0 +1,238 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Glob = void 0; +const minimatch_1 = require("minimatch"); +const path_scurry_1 = require("path-scurry"); +const url_1 = require("url"); +const pattern_js_1 = require("./pattern.js"); +const walker_js_1 = require("./walker.js"); +// if no process global, just call it linux. +// so we default to case-sensitive, / separators +const defaultPlatform = typeof process === 'object' && + process && + typeof process.platform === 'string' + ? process.platform + : 'linux'; +/** + * An object that can perform glob pattern traversals. + */ +class Glob { + absolute; + cwd; + root; + dot; + dotRelative; + follow; + ignore; + magicalBraces; + mark; + matchBase; + maxDepth; + nobrace; + nocase; + nodir; + noext; + noglobstar; + pattern; + platform; + realpath; + scurry; + stat; + signal; + windowsPathsNoEscape; + withFileTypes; + /** + * The options provided to the constructor. + */ + opts; + /** + * An array of parsed immutable {@link Pattern} objects. 
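+     * One Pattern is built per expanded match set; the walk and stream + * methods below hand them to the GlobWalker/GlobStream.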
+ */ + patterns; + /** + * All options are stored as properties on the `Glob` object. + * + * See {@link GlobOptions} for full options descriptions. + * + * Note that a previous `Glob` object can be passed as the + * `GlobOptions` to another `Glob` instantiation to re-use settings + * and caches with a new pattern. + * + * Traversal functions can be called multiple times to run the walk + * again. + */ + constructor(pattern, opts) { + /* c8 ignore start */ + if (!opts) + throw new TypeError('glob options required'); + /* c8 ignore stop */ + this.withFileTypes = !!opts.withFileTypes; + this.signal = opts.signal; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.dotRelative = !!opts.dotRelative; + this.nodir = !!opts.nodir; + this.mark = !!opts.mark; + if (!opts.cwd) { + this.cwd = ''; + } + else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) { + opts.cwd = (0, url_1.fileURLToPath)(opts.cwd); + } + this.cwd = opts.cwd || ''; + this.root = opts.root; + this.magicalBraces = !!opts.magicalBraces; + this.nobrace = !!opts.nobrace; + this.noext = !!opts.noext; + this.realpath = !!opts.realpath; + this.absolute = opts.absolute; + this.noglobstar = !!opts.noglobstar; + this.matchBase = !!opts.matchBase; + this.maxDepth = + typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity; + this.stat = !!opts.stat; + this.ignore = opts.ignore; + if (this.withFileTypes && this.absolute !== undefined) { + throw new Error('cannot set absolute and withFileTypes:true'); + } + if (typeof pattern === 'string') { + pattern = [pattern]; + } + this.windowsPathsNoEscape = + !!opts.windowsPathsNoEscape || + opts.allowWindowsEscape === false; + if (this.windowsPathsNoEscape) { + pattern = pattern.map(p => p.replace(/\\/g, '/')); + } + if (this.matchBase) { + if (opts.noglobstar) { + throw new TypeError('base matching requires globstar'); + } + pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`)); + } + this.pattern = pattern; + this.platform = opts.platform || defaultPlatform; + this.opts = { ...opts, platform: this.platform }; + if (opts.scurry) { + this.scurry = opts.scurry; + if (opts.nocase !== undefined && + opts.nocase !== opts.scurry.nocase) { + throw new Error('nocase option contradicts provided scurry option'); + } + } + else { + const Scurry = opts.platform === 'win32' + ? path_scurry_1.PathScurryWin32 + : opts.platform === 'darwin' + ? path_scurry_1.PathScurryDarwin + : opts.platform + ? path_scurry_1.PathScurryPosix + : path_scurry_1.PathScurry; + this.scurry = new Scurry(this.cwd, { + nocase: opts.nocase, + fs: opts.fs, + }); + } + this.nocase = this.scurry.nocase; + // If you do nocase:true on a case-sensitive file system, then + // we need to use regexps instead of strings for non-magic + // path portions, because statting `aBc` won't return results + // for the file `AbC` for example. 
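+        // darwin and win32 are assumed case-insensitive by default, so only + // the magic portions of a pattern need case-folding there.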
+ const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32'; + const mmo = { + // default nocase based on platform + ...opts, + dot: this.dot, + matchBase: this.matchBase, + nobrace: this.nobrace, + nocase: this.nocase, + nocaseMagicOnly, + nocomment: true, + noext: this.noext, + nonegate: true, + optimizationLevel: 2, + platform: this.platform, + windowsPathsNoEscape: this.windowsPathsNoEscape, + debug: !!this.opts.debug, + }; + const mms = this.pattern.map(p => new minimatch_1.Minimatch(p, mmo)); + const [matchSet, globParts] = mms.reduce((set, m) => { + set[0].push(...m.set); + set[1].push(...m.globParts); + return set; + }, [[], []]); + this.patterns = matchSet.map((set, i) => { + return new pattern_js_1.Pattern(set, globParts[i], 0, this.platform); + }); + } + async walk() { + // Walkers always return array of Path objects, so we just have to + // coerce them into the right shape. It will have already called + // realpath() if the option was set to do so, so we know that's cached. + // start out knowing the cwd, at least + return [ + ...(await new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).walk()), + ]; + } + walkSync() { + return [ + ...new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).walkSync(), + ]; + } + stream() { + return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).stream(); + } + streamSync() { + return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).streamSync(); + } + /** + * Default sync iteration function. Returns a Generator that + * iterates over the results. + */ + iterateSync() { + return this.streamSync()[Symbol.iterator](); + } + [Symbol.iterator]() { + return this.iterateSync(); + } + /** + * Default async iteration function. Returns an AsyncGenerator that + * iterates over the results. + */ + iterate() { + return this.stream()[Symbol.asyncIterator](); + } + [Symbol.asyncIterator]() { + return this.iterate(); + } +} +exports.Glob = Glob; +//# sourceMappingURL=glob.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js new file mode 100644 index 0000000000000..0918bd57e0f1c --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/has-magic.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.hasMagic = void 0; +const minimatch_1 = require("minimatch"); +/** + * Return true if the patterns provided contain any magic glob characters, + * given the options provided. + * + * Brace expansion is not considered "magic" unless the `magicalBraces` option + * is set, as brace expansion just turns one string into an array of strings. 
+ * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and + * `'xby'` both do not contain any magic glob characters, and it's treated the + * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true` + * is in the options, brace expansion _is_ treated as a pattern having magic. + */ +const hasMagic = (pattern, options = {}) => { + if (!Array.isArray(pattern)) { + pattern = [pattern]; + } + for (const p of pattern) { + if (new minimatch_1.Minimatch(p, options).hasMagic()) + return true; + } + return false; +}; +exports.hasMagic = hasMagic; +//# sourceMappingURL=has-magic.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js new file mode 100644 index 0000000000000..0cbcca335e1cc --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/ignore.js @@ -0,0 +1,103 @@ +"use strict"; +// give it a pattern, and it'll be able to tell you if +// a given path should be ignored. +// Ignoring a path ignores its children if the pattern ends in /** +// Ignores are always parsed in dot:true mode +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Ignore = void 0; +const minimatch_1 = require("minimatch"); +const pattern_js_1 = require("./pattern.js"); +const defaultPlatform = typeof process === 'object' && + process && + typeof process.platform === 'string' + ? process.platform + : 'linux'; +/** + * Class used to process ignored patterns + */ +class Ignore { + relative; + relativeChildren; + absolute; + absoluteChildren; + constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) { + this.relative = []; + this.absolute = []; + this.relativeChildren = []; + this.absoluteChildren = []; + const mmopts = { + dot: true, + nobrace, + nocase, + noext, + noglobstar, + optimizationLevel: 2, + platform, + nocomment: true, + nonegate: true, + }; + // this is a little weird, but it gives us a clean set of optimized + // minimatch matchers, without getting tripped up if one of them + // ends in /** inside a brace section, and it's only inefficient at + // the start of the walk, not along it. + // It'd be nice if the Pattern class just had a .test() method, but + // handling globstars is a bit of a pita, and that code already lives + // in minimatch anyway. + // Another way would be if maybe Minimatch could take its set/globParts + // as an option, and then we could at least just use Pattern to test + // for absolute-ness. + // Yet another way, Minimatch could take an array of glob strings, and + // a cwd option, and do the right thing. 
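+ // Illustrative note (not upstream glob source): a pattern ending in /** + // is recorded both as a plain matcher and as a "children" matcher, so + // e.g. 'node_modules/**' lets childrenIgnored() prune the whole subtree + // without testing every descendant path.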
+ for (const ign of ignored) { + const mm = new minimatch_1.Minimatch(ign, mmopts); + for (let i = 0; i < mm.set.length; i++) { + const parsed = mm.set[i]; + const globParts = mm.globParts[i]; + const p = new pattern_js_1.Pattern(parsed, globParts, 0, platform); + const m = new minimatch_1.Minimatch(p.globString(), mmopts); + const children = globParts[globParts.length - 1] === '**'; + const absolute = p.isAbsolute(); + if (absolute) + this.absolute.push(m); + else + this.relative.push(m); + if (children) { + if (absolute) + this.absoluteChildren.push(m); + else + this.relativeChildren.push(m); + } + } + } + } + ignored(p) { + const fullpath = p.fullpath(); + const fullpaths = `${fullpath}/`; + const relative = p.relative() || '.'; + const relatives = `${relative}/`; + for (const m of this.relative) { + if (m.match(relative) || m.match(relatives)) + return true; + } + for (const m of this.absolute) { + if (m.match(fullpath) || m.match(fullpaths)) + return true; + } + return false; + } + childrenIgnored(p) { + const fullpath = p.fullpath() + '/'; + const relative = (p.relative() || '.') + '/'; + for (const m of this.relativeChildren) { + if (m.match(relative)) + return true; + } + for (const m of this.absoluteChildren) { + if (m.match(fullpath)) + return true; + } + return false; + } +} +exports.Ignore = Ignore; +//# sourceMappingURL=ignore.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js new file mode 100644 index 0000000000000..71c31c03dd339 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/index.js @@ -0,0 +1,68 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.glob = exports.hasMagic = exports.Glob = exports.unescape = exports.escape = exports.sync = exports.iterate = exports.iterateSync = exports.stream = exports.streamSync = exports.globIterate = exports.globIterateSync = exports.globSync = exports.globStream = exports.globStreamSync = void 0; +const minimatch_1 = require("minimatch"); +const glob_js_1 = require("./glob.js"); +const has_magic_js_1 = require("./has-magic.js"); +function globStreamSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).streamSync(); +} +exports.globStreamSync = globStreamSync; +function globStream(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).stream(); +} +exports.globStream = globStream; +function globSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).walkSync(); +} +exports.globSync = globSync; +async function glob_(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).walk(); +} +function globIterateSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).iterateSync(); +} +exports.globIterateSync = globIterateSync; +function globIterate(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).iterate(); +} +exports.globIterate = globIterate; +// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc +exports.streamSync = globStreamSync; +exports.stream = Object.assign(globStream, { sync: globStreamSync }); +exports.iterateSync = globIterateSync; +exports.iterate = Object.assign(globIterate, { + sync: globIterateSync, +}); +exports.sync = Object.assign(globSync, { + stream: globStreamSync, + iterate: globIterateSync, +}); +/* c8 ignore start */ +var minimatch_2 = require("minimatch");
+Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return minimatch_2.escape; } }); +Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return minimatch_2.unescape; } }); +var glob_js_2 = require("./glob.js"); +Object.defineProperty(exports, "Glob", { enumerable: true, get: function () { return glob_js_2.Glob; } }); +var has_magic_js_2 = require("./has-magic.js"); +Object.defineProperty(exports, "hasMagic", { enumerable: true, get: function () { return has_magic_js_2.hasMagic; } }); +/* c8 ignore stop */ +exports.glob = Object.assign(glob_, { + glob: glob_, + globSync, + sync: exports.sync, + globStream, + stream: exports.stream, + globStreamSync, + streamSync: exports.streamSync, + globIterate, + iterate: exports.iterate, + globIterateSync, + iterateSync: exports.iterateSync, + Glob: glob_js_1.Glob, + hasMagic: has_magic_js_1.hasMagic, + escape: minimatch_1.escape, + unescape: minimatch_1.unescape, +}); +exports.glob.glob = exports.glob; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js new file mode 100644 index 0000000000000..181371293d860 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/pattern.js @@ -0,0 +1,219 @@ +"use strict"; +// this is just a very light wrapper around 2 arrays with an offset index +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Pattern = void 0; +const minimatch_1 = require("minimatch"); +const isPatternList = (pl) => pl.length >= 1; +const isGlobList = (gl) => gl.length >= 1; +/** + * An immutable-ish view on an array of glob parts and their parsed + * results + */ +class Pattern { + #patternList; + #globList; + #index; + length; + #platform; + #rest; + #globString; + #isDrive; + #isUNC; + #isAbsolute; + #followGlobstar = true; + constructor(patternList, globList, index, platform) { + if (!isPatternList(patternList)) { + throw new TypeError('empty pattern list'); + } + if (!isGlobList(globList)) { + throw new TypeError('empty glob list'); + } + if (globList.length !== patternList.length) { + throw new TypeError('mismatched pattern list and glob list lengths'); + } + this.length = patternList.length; + if (index < 0 || index >= this.length) { + throw new TypeError('index out of range'); + } + this.#patternList = patternList; + this.#globList = globList; + this.#index = index; + this.#platform = platform; + // normalize root entries of absolute patterns on initial creation. 
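+ // Illustrative note (not upstream glob source): folding the root into a + // single part means e.g. ['', 'etc'] becomes ['/', 'etc'], so a walker + // resolves the root in one step instead of handling empty leading parts.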
+ if (this.#index === 0) { + // c: => ['c:/'] + // C:/ => ['C:/'] + // C:/x => ['C:/', 'x'] + // //host/share => ['//host/share/'] + // //host/share/ => ['//host/share/'] + // //host/share/x => ['//host/share/', 'x'] + // /etc => ['/', 'etc'] + // / => ['/'] + if (this.isUNC()) { + // '' / '' / 'host' / 'share' + const [p0, p1, p2, p3, ...prest] = this.#patternList; + const [g0, g1, g2, g3, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = [p0, p1, p2, p3, ''].join('/'); + const g = [g0, g1, g2, g3, ''].join('/'); + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + else if (this.isDrive() || this.isAbsolute()) { + const [p1, ...prest] = this.#patternList; + const [g1, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = p1 + '/'; + const g = g1 + '/'; + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + } + } + /** + * The first entry in the parsed list of patterns + */ + pattern() { + return this.#patternList[this.#index]; + } + /** + * true if pattern() returns a string + */ + isString() { + return typeof this.#patternList[this.#index] === 'string'; + } + /** + * true if pattern() returns GLOBSTAR + */ + isGlobstar() { + return this.#patternList[this.#index] === minimatch_1.GLOBSTAR; + } + /** + * true if pattern() returns a regexp + */ + isRegExp() { + return this.#patternList[this.#index] instanceof RegExp; + } + /** + * The /-joined set of glob parts that make up this pattern + */ + globString() { + return (this.#globString = + this.#globString || + (this.#index === 0 + ? this.isAbsolute() + ? this.#globList[0] + this.#globList.slice(1).join('/') + : this.#globList.join('/') + : this.#globList.slice(this.#index).join('/'))); + } + /** + * true if there are more pattern parts after this one + */ + hasMore() { + return this.length > this.#index + 1; + } + /** + * The rest of the pattern after this part, or null if this is the end + */ + rest() { + if (this.#rest !== undefined) + return this.#rest; + if (!this.hasMore()) + return (this.#rest = null); + this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform); + this.#rest.#isAbsolute = this.#isAbsolute; + this.#rest.#isUNC = this.#isUNC; + this.#rest.#isDrive = this.#isDrive; + return this.#rest; + } + /** + * true if the pattern represents a //unc/path/ on windows + */ + isUNC() { + const pl = this.#patternList; + return this.#isUNC !== undefined + ? this.#isUNC + : (this.#isUNC = + this.#platform === 'win32' && + this.#index === 0 && + pl[0] === '' && + pl[1] === '' && + typeof pl[2] === 'string' && + !!pl[2] && + typeof pl[3] === 'string' && + !!pl[3]); + } + // pattern like C:/... + // split = ['C:', ...] + // XXX: would be nice to handle patterns like `c:*` to test the cwd + // in c: for *, but I don't know of a way to even figure out what that + // cwd is without actually chdir'ing into it? + /** + * True if the pattern starts with a drive letter on Windows + */ + isDrive() { + const pl = this.#patternList; + return this.#isDrive !== undefined + ? this.#isDrive + : (this.#isDrive = + this.#platform === 'win32' && + this.#index === 0 && + this.length > 1 && + typeof pl[0] === 'string' && + /^[a-z]:$/i.test(pl[0])); + } + // pattern = '/' or '/...' or '/x/...' + // split = ['', ''] or ['', ...] or ['', 'x', ...]
+ // Drive and UNC both considered absolute on windows + /** + * True if the pattern is rooted on an absolute path + */ + isAbsolute() { + const pl = this.#patternList; + return this.#isAbsolute !== undefined + ? this.#isAbsolute + : (this.#isAbsolute = + (pl[0] === '' && pl.length > 1) || + this.isDrive() || + this.isUNC()); + } + /** + * consume the root of the pattern, and return it + */ + root() { + const p = this.#patternList[0]; + return typeof p === 'string' && this.isAbsolute() && this.#index === 0 + ? p + : ''; + } + /** + * Check to see if the current globstar pattern is allowed to follow + * a symbolic link. + */ + checkFollowGlobstar() { + return !(this.#index === 0 || + !this.isGlobstar() || + !this.#followGlobstar); + } + /** + * Mark that the current globstar pattern is following a symbolic link + */ + markFollowGlobstar() { + if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar) + return false; + this.#followGlobstar = false; + return true; + } +} +exports.Pattern = Pattern; +//# sourceMappingURL=pattern.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js new file mode 100644 index 0000000000000..bd067e9b9033d --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/processor.js @@ -0,0 +1,309 @@ +"use strict"; +// synchronous utility for filtering entries and calculating subwalks +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Processor = exports.SubWalks = exports.MatchRecord = exports.HasWalkedCache = void 0; +const minimatch_1 = require("minimatch"); +/** + * A cache of which patterns have been processed for a given Path + */ +class HasWalkedCache { + store; + constructor(store = new Map()) { + this.store = store; + } + copy() { + return new HasWalkedCache(new Map(this.store)); + } + hasWalked(target, pattern) { + return this.store.get(target.fullpath())?.has(pattern.globString()); + } + storeWalked(target, pattern) { + const fullpath = target.fullpath(); + const cached = this.store.get(fullpath); + if (cached) + cached.add(pattern.globString()); + else + this.store.set(fullpath, new Set([pattern.globString()])); + } +} +exports.HasWalkedCache = HasWalkedCache; +/** + * A record of which paths have been matched in a given walk step, + * and whether they only are considered a match if they are a directory, + * and whether their absolute or relative path should be returned. + */ +class MatchRecord { + store = new Map(); + add(target, absolute, ifDir) { + const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0); + const current = this.store.get(target); + this.store.set(target, current === undefined ? n : n & current); + } + // match, absolute, ifdir + entries() { + return [...this.store.entries()].map(([path, n]) => [ + path, + !!(n & 2), + !!(n & 1), + ]); + } +} +exports.MatchRecord = MatchRecord; +/** + * A collection of patterns that must be processed in a subsequent step + * for a given path. 
+ */ +class SubWalks { + store = new Map(); + add(target, pattern) { + if (!target.canReaddir()) { + return; + } + const subs = this.store.get(target); + if (subs) { + if (!subs.find(p => p.globString() === pattern.globString())) { + subs.push(pattern); + } + } + else + this.store.set(target, [pattern]); + } + get(target) { + const subs = this.store.get(target); + /* c8 ignore start */ + if (!subs) { + throw new Error('attempting to walk unknown path'); + } + /* c8 ignore stop */ + return subs; + } + entries() { + return this.keys().map(k => [k, this.store.get(k)]); + } + keys() { + return [...this.store.keys()].filter(t => t.canReaddir()); + } +} +exports.SubWalks = SubWalks; +/** + * The class that processes patterns for a given path. + * + * Handles child entry filtering, and determining whether a path's + * directory contents must be read. + */ +class Processor { + hasWalkedCache; + matches = new MatchRecord(); + subwalks = new SubWalks(); + patterns; + follow; + dot; + opts; + constructor(opts, hasWalkedCache) { + this.opts = opts; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.hasWalkedCache = hasWalkedCache + ? hasWalkedCache.copy() + : new HasWalkedCache(); + } + processPatterns(target, patterns) { + this.patterns = patterns; + const processingSet = patterns.map(p => [target, p]); + // map of paths to the magic-starting subwalks they need to walk + // first item in patterns is the filter + for (let [t, pattern] of processingSet) { + this.hasWalkedCache.storeWalked(t, pattern); + const root = pattern.root(); + const absolute = pattern.isAbsolute() && this.opts.absolute !== false; + // start absolute patterns at root + if (root) { + t = t.resolve(root === '/' && this.opts.root !== undefined + ? this.opts.root + : root); + const rest = pattern.rest(); + if (!rest) { + this.matches.add(t, true, false); + continue; + } + else { + pattern = rest; + } + } + if (t.isENOENT()) + continue; + let p; + let rest; + let changed = false; + while (typeof (p = pattern.pattern()) === 'string' && + (rest = pattern.rest())) { + const c = t.resolve(p); + // we can be reasonably sure that .. is a readable dir + if (c.isUnknown() && p !== '..') + break; + t = c; + pattern = rest; + changed = true; + } + p = pattern.pattern(); + rest = pattern.rest(); + if (changed) { + if (this.hasWalkedCache.hasWalked(t, pattern)) + continue; + this.hasWalkedCache.storeWalked(t, pattern); + } + // now we have either a final string for a known entry, + // more strings for an unknown entry, + // or a pattern starting with magic, mounted on t. + if (typeof p === 'string') { + // must be final entry + if (!rest) { + const ifDir = p === '..' || p === '' || p === '.'; + this.matches.add(t.resolve(p), absolute, ifDir); + } + else { + this.subwalks.add(t, pattern); + } + continue; + } + else if (p === minimatch_1.GLOBSTAR) { + // if no rest, match and subwalk pattern + // if rest, process rest and subwalk pattern + // if it's a symlink, but we didn't get here by way of a + // globstar match (meaning it's the first time THIS globstar + // has traversed a symlink), then we follow it. Otherwise, stop. + if (!t.isSymbolicLink() || + this.follow || + pattern.checkFollowGlobstar()) { + this.subwalks.add(t, pattern); + } + const rp = rest?.pattern(); + const rrest = rest?.rest(); + if (!rest || ((rp === '' || rp === '.') && !rrest)) { + // only HAS to be a dir if it ends in **/ or **/. + // but ending in ** will match files as well. 
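+ // Illustrative note (not upstream glob source): e.g. 'a/**/' matches + // only directories under a/, while 'a/**' matches files as well.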
+ this.matches.add(t, absolute, rp === '' || rp === '.'); + } + else { + if (rp === '..') { + // this would mean you're matching **/.. at the fs root, + // and no thanks, I'm not gonna test that specific case. + /* c8 ignore start */ + const tp = t.parent || t; + /* c8 ignore stop */ + if (!rrest) + this.matches.add(tp, absolute, true); + else if (!this.hasWalkedCache.hasWalked(tp, rrest)) { + this.subwalks.add(tp, rrest); + } + } + } + } + else if (p instanceof RegExp) { + this.subwalks.add(t, pattern); + } + } + return this; + } + subwalkTargets() { + return this.subwalks.keys(); + } + child() { + return new Processor(this.opts, this.hasWalkedCache); + } + // return a new Processor containing the subwalks for each + // child entry, and a set of matches, and + // a hasWalkedCache that's a copy of this one + // then we're going to call + filterEntries(parent, entries) { + const patterns = this.subwalks.get(parent); + // put matches and entry walks into the results processor + const results = this.child(); + for (const e of entries) { + for (const pattern of patterns) { + const absolute = pattern.isAbsolute(); + const p = pattern.pattern(); + const rest = pattern.rest(); + if (p === minimatch_1.GLOBSTAR) { + results.testGlobstar(e, pattern, rest, absolute); + } + else if (p instanceof RegExp) { + results.testRegExp(e, p, rest, absolute); + } + else { + results.testString(e, p, rest, absolute); + } + } + } + return results; + } + testGlobstar(e, pattern, rest, absolute) { + if (this.dot || !e.name.startsWith('.')) { + if (!pattern.hasMore()) { + this.matches.add(e, absolute, false); + } + if (e.canReaddir()) { + // if we're in follow mode or it's not a symlink, just keep + // testing the same pattern. If there's more after the globstar, + // then this symlink consumes the globstar. If not, then we can + // follow at most ONE symlink along the way, so we mark it, which + // also checks to ensure that it wasn't already marked. + if (this.follow || !e.isSymbolicLink()) { + this.subwalks.add(e, pattern); + } + else if (e.isSymbolicLink()) { + if (rest && pattern.checkFollowGlobstar()) { + this.subwalks.add(e, rest); + } + else if (pattern.markFollowGlobstar()) { + this.subwalks.add(e, pattern); + } + } + } + } + // if the NEXT thing matches this entry, then also add + // the rest. + if (rest) { + const rp = rest.pattern(); + if (typeof rp === 'string' && + // dots and empty were handled already + rp !== '..' && + rp !== '' && + rp !== '.') { + this.testString(e, rp, rest.rest(), absolute); + } + else if (rp === '..') { + /* c8 ignore start */ + const ep = e.parent || e; + /* c8 ignore stop */ + this.subwalks.add(ep, rest); + } + else if (rp instanceof RegExp) { + this.testRegExp(e, rp, rest.rest(), absolute); + } + } + } + testRegExp(e, p, rest, absolute) { + if (!p.test(e.name)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } + testString(e, p, rest, absolute) { + // should never happen? 
+ if (!e.isNamed(p)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } +} +exports.Processor = Processor; +//# sourceMappingURL=processor.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.js new file mode 100644 index 0000000000000..9651ce1164016 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/cjs/src/walker.js @@ -0,0 +1,358 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GlobStream = exports.GlobWalker = exports.GlobUtil = void 0; +/** + * Single-use utility classes to provide functionality to the {@link Glob} + * methods. + * + * @module + */ +const minipass_1 = require("minipass"); +const ignore_js_1 = require("./ignore.js"); +const processor_js_1 = require("./processor.js"); +const makeIgnore = (ignore, opts) => typeof ignore === 'string' + ? new ignore_js_1.Ignore([ignore], opts) + : Array.isArray(ignore) + ? new ignore_js_1.Ignore(ignore, opts) + : ignore; +/** + * basic walking utilities that all the glob walker types use + */ +class GlobUtil { + path; + patterns; + opts; + seen = new Set(); + paused = false; + aborted = false; + #onResume = []; + #ignore; + #sep; + signal; + maxDepth; + constructor(patterns, path, opts) { + this.patterns = patterns; + this.path = path; + this.opts = opts; + this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/'; + if (opts.ignore) { + this.#ignore = makeIgnore(opts.ignore, opts); + } + // ignore, always set with maxDepth, but it's optional on the + // GlobOptions type + /* c8 ignore start */ + this.maxDepth = opts.maxDepth || Infinity; + /* c8 ignore stop */ + if (opts.signal) { + this.signal = opts.signal; + this.signal.addEventListener('abort', () => { + this.#onResume.length = 0; + }); + } + } + #ignored(path) { + return this.seen.has(path) || !!this.#ignore?.ignored?.(path); + } + #childrenIgnored(path) { + return !!this.#ignore?.childrenIgnored?.(path); + } + // backpressure mechanism + pause() { + this.paused = true; + } + resume() { + /* c8 ignore start */ + if (this.signal?.aborted) + return; + /* c8 ignore stop */ + this.paused = false; + let fn = undefined; + while (!this.paused && (fn = this.#onResume.shift())) { + fn(); + } + } + onResume(fn) { + if (this.signal?.aborted) + return; + /* c8 ignore start */ + if (!this.paused) { + fn(); + } + else { + /* c8 ignore stop */ + this.#onResume.push(fn); + } + } + // do the requisite realpath/stat checking, and return the path + // to add or undefined to filter it out. + async matchCheck(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || (await e.realpath()); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + return this.matchCheckTest(needStat ? await e.lstat() : e, ifDir); + } + matchCheckTest(e, ifDir) { + return e && + (this.maxDepth === Infinity || e.depth() <= this.maxDepth) && + (!ifDir || e.canReaddir()) && + (!this.opts.nodir || !e.isDirectory()) && + !this.#ignored(e) + ? 
e + : undefined; + } + matchCheckSync(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || e.realpathSync(); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + return this.matchCheckTest(needStat ? e.lstatSync() : e, ifDir); + } + matchFinish(e, absolute) { + if (this.#ignored(e)) + return; + const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute; + this.seen.add(e); + const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''; + // ok, we have what we need! + if (this.opts.withFileTypes) { + this.matchEmit(e); + } + else if (abs) { + const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath(); + this.matchEmit(abs + mark); + } + else { + const rel = this.opts.posix ? e.relativePosix() : e.relative(); + const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) + ? '.' + this.#sep + : ''; + this.matchEmit(!rel ? '.' + mark : pre + rel + mark); + } + } + async match(e, absolute, ifDir) { + const p = await this.matchCheck(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + matchSync(e, absolute, ifDir) { + const p = this.matchCheckSync(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + walkCB(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2(target, patterns, new processor_js_1.Processor(this.opts), cb); + } + walkCB2(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
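+ // Illustrative note (not upstream glob source): tasks starts at 1 to + // pair with the trailing next() call, so cb() fires exactly once, and + // only after every async match() and readdir subwalk has called next().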
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const childrenCached = t.readdirCached(); + if (t.calledReaddir()) + this.walkCB3(t, childrenCached, processor, next); + else { + t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true); + } + } + next(); + } + walkCB3(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2(target, patterns, processor.child(), next); + } + next(); + } + walkCBSync(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2Sync(target, patterns, new processor_js_1.Processor(this.opts), cb); + } + walkCB2Sync(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
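+ // Illustrative note (not upstream glob source): unlike the async + // walkCB2 above, matchSync completes inline, so only the readdir + // subwalks increment the task counter here.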
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const children = t.readdirSync(); + this.walkCB3Sync(t, children, processor, next); + } + next(); + } + walkCB3Sync(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2Sync(target, patterns, processor.child(), next); + } + next(); + } +} +exports.GlobUtil = GlobUtil; +class GlobWalker extends GlobUtil { + matches; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.matches = new Set(); + } + matchEmit(e) { + this.matches.add(e); + } + async walk() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + await this.path.lstat(); + } + await new Promise((res, rej) => { + this.walkCB(this.path, this.patterns, () => { + if (this.signal?.aborted) { + rej(this.signal.reason); + } + else { + res(this.matches); + } + }); + }); + return this.matches; + } + walkSync() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + // nothing for the callback to do, because this never pauses + this.walkCBSync(this.path, this.patterns, () => { + if (this.signal?.aborted) + throw this.signal.reason; + }); + return this.matches; + } +} +exports.GlobWalker = GlobWalker; +class GlobStream extends GlobUtil { + results; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.results = new minipass_1.Minipass({ + signal: this.signal, + objectMode: true, + }); + this.results.on('drain', () => this.resume()); + this.results.on('resume', () => this.resume()); + } + matchEmit(e) { + this.results.write(e); + if (!this.results.flowing) + this.pause(); + } + stream() { + const target = this.path; + if (target.isUnknown()) { + target.lstat().then(() => { + this.walkCB(target, this.patterns, () => this.results.end()); + }); + } + else { + this.walkCB(target, this.patterns, () => this.results.end()); + } + return this.results; + } + streamSync() { + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + this.walkCBSync(this.path, this.patterns, () => this.results.end()); + return this.results; + } +} +exports.GlobStream = GlobStream; +//# sourceMappingURL=walker.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.js new file mode 100644 index 0000000000000..8ff26154427be --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/glob.js @@ -0,0 +1,234 @@ +import { Minimatch } from 'minimatch'; +import { PathScurry, PathScurryDarwin, PathScurryPosix, PathScurryWin32, } from 'path-scurry'; +import { fileURLToPath } from 'url'; +import { Pattern } from './pattern.js'; +import { GlobStream, GlobWalker } from './walker.js'; +// if no process global, just call it linux. 
+// so we default to case-sensitive, / separators +const defaultPlatform = typeof process === 'object' && + process && + typeof process.platform === 'string' + ? process.platform + : 'linux'; +/** + * An object that can perform glob pattern traversals. + */ +export class Glob { + absolute; + cwd; + root; + dot; + dotRelative; + follow; + ignore; + magicalBraces; + mark; + matchBase; + maxDepth; + nobrace; + nocase; + nodir; + noext; + noglobstar; + pattern; + platform; + realpath; + scurry; + stat; + signal; + windowsPathsNoEscape; + withFileTypes; + /** + * The options provided to the constructor. + */ + opts; + /** + * An array of parsed immutable {@link Pattern} objects. + */ + patterns; + /** + * All options are stored as properties on the `Glob` object. + * + * See {@link GlobOptions} for full options descriptions. + * + * Note that a previous `Glob` object can be passed as the + * `GlobOptions` to another `Glob` instantiation to re-use settings + * and caches with a new pattern. + * + * Traversal functions can be called multiple times to run the walk + * again. + */ + constructor(pattern, opts) { + /* c8 ignore start */ + if (!opts) + throw new TypeError('glob options required'); + /* c8 ignore stop */ + this.withFileTypes = !!opts.withFileTypes; + this.signal = opts.signal; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.dotRelative = !!opts.dotRelative; + this.nodir = !!opts.nodir; + this.mark = !!opts.mark; + if (!opts.cwd) { + this.cwd = ''; + } + else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) { + opts.cwd = fileURLToPath(opts.cwd); + } + this.cwd = opts.cwd || ''; + this.root = opts.root; + this.magicalBraces = !!opts.magicalBraces; + this.nobrace = !!opts.nobrace; + this.noext = !!opts.noext; + this.realpath = !!opts.realpath; + this.absolute = opts.absolute; + this.noglobstar = !!opts.noglobstar; + this.matchBase = !!opts.matchBase; + this.maxDepth = + typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity; + this.stat = !!opts.stat; + this.ignore = opts.ignore; + if (this.withFileTypes && this.absolute !== undefined) { + throw new Error('cannot set absolute and withFileTypes:true'); + } + if (typeof pattern === 'string') { + pattern = [pattern]; + } + this.windowsPathsNoEscape = + !!opts.windowsPathsNoEscape || + opts.allowWindowsEscape === false; + if (this.windowsPathsNoEscape) { + pattern = pattern.map(p => p.replace(/\\/g, '/')); + } + if (this.matchBase) { + if (opts.noglobstar) { + throw new TypeError('base matching requires globstar'); + } + pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`)); + } + this.pattern = pattern; + this.platform = opts.platform || defaultPlatform; + this.opts = { ...opts, platform: this.platform }; + if (opts.scurry) { + this.scurry = opts.scurry; + if (opts.nocase !== undefined && + opts.nocase !== opts.scurry.nocase) { + throw new Error('nocase option contradicts provided scurry option'); + } + } + else { + const Scurry = opts.platform === 'win32' + ? PathScurryWin32 + : opts.platform === 'darwin' + ? PathScurryDarwin + : opts.platform + ? PathScurryPosix + : PathScurry; + this.scurry = new Scurry(this.cwd, { + nocase: opts.nocase, + fs: opts.fs, + }); + } + this.nocase = this.scurry.nocase; + // If you do nocase:true on a case-sensitive file system, then + // we need to use regexps instead of strings for non-magic + // path portions, because statting `aBc` won't return results + // for the file `AbC` for example. 
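+ // Illustrative note (not upstream glob source; same rationale as the + // CJS build above): darwin/win32 are assumed case-insensitive, so only + // magic portions need case-folding regexps.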
+ const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32'; + const mmo = { + // default nocase based on platform + ...opts, + dot: this.dot, + matchBase: this.matchBase, + nobrace: this.nobrace, + nocase: this.nocase, + nocaseMagicOnly, + nocomment: true, + noext: this.noext, + nonegate: true, + optimizationLevel: 2, + platform: this.platform, + windowsPathsNoEscape: this.windowsPathsNoEscape, + debug: !!this.opts.debug, + }; + const mms = this.pattern.map(p => new Minimatch(p, mmo)); + const [matchSet, globParts] = mms.reduce((set, m) => { + set[0].push(...m.set); + set[1].push(...m.globParts); + return set; + }, [[], []]); + this.patterns = matchSet.map((set, i) => { + return new Pattern(set, globParts[i], 0, this.platform); + }); + } + async walk() { + // Walkers always return array of Path objects, so we just have to + // coerce them into the right shape. It will have already called + // realpath() if the option was set to do so, so we know that's cached. + // start out knowing the cwd, at least + return [ + ...(await new GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).walk()), + ]; + } + walkSync() { + return [ + ...new GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).walkSync(), + ]; + } + stream() { + return new GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).stream(); + } + streamSync() { + return new GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity + ? this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + }).streamSync(); + } + /** + * Default sync iteration function. Returns a Generator that + * iterates over the results. + */ + iterateSync() { + return this.streamSync()[Symbol.iterator](); + } + [Symbol.iterator]() { + return this.iterateSync(); + } + /** + * Default async iteration function. Returns an AsyncGenerator that + * iterates over the results. + */ + iterate() { + return this.stream()[Symbol.asyncIterator](); + } + [Symbol.asyncIterator]() { + return this.iterate(); + } +} +//# sourceMappingURL=glob.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.js new file mode 100644 index 0000000000000..ba2321ab868d0 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/has-magic.js @@ -0,0 +1,23 @@ +import { Minimatch } from 'minimatch'; +/** + * Return true if the patterns provided contain any magic glob characters, + * given the options provided. + * + * Brace expansion is not considered "magic" unless the `magicalBraces` option + * is set, as brace expansion just turns one string into an array of strings. + * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and + * `'xby'` both do not contain any magic glob characters, and it's treated the + * same as if you had called it on `['xay', 'xby']`. 
When `magicalBraces:true` + * is in the options, brace expansion _is_ treated as a pattern having magic. + */ +export const hasMagic = (pattern, options = {}) => { + if (!Array.isArray(pattern)) { + pattern = [pattern]; + } + for (const p of pattern) { + if (new Minimatch(p, options).hasMagic()) + return true; + } + return false; +}; +//# sourceMappingURL=has-magic.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.js new file mode 100644 index 0000000000000..2dbaa16a11460 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/ignore.js @@ -0,0 +1,99 @@ +// give it a pattern, and it'll be able to tell you if +// a given path should be ignored. +// Ignoring a path ignores its children if the pattern ends in /** +// Ignores are always parsed in dot:true mode +import { Minimatch } from 'minimatch'; +import { Pattern } from './pattern.js'; +const defaultPlatform = typeof process === 'object' && + process && + typeof process.platform === 'string' + ? process.platform + : 'linux'; +/** + * Class used to process ignored patterns + */ +export class Ignore { + relative; + relativeChildren; + absolute; + absoluteChildren; + constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) { + this.relative = []; + this.absolute = []; + this.relativeChildren = []; + this.absoluteChildren = []; + const mmopts = { + dot: true, + nobrace, + nocase, + noext, + noglobstar, + optimizationLevel: 2, + platform, + nocomment: true, + nonegate: true, + }; + // this is a little weird, but it gives us a clean set of optimized + // minimatch matchers, without getting tripped up if one of them + // ends in /** inside a brace section, and it's only inefficient at + // the start of the walk, not along it. + // It'd be nice if the Pattern class just had a .test() method, but + // handling globstars is a bit of a pita, and that code already lives + // in minimatch anyway. + // Another way would be if maybe Minimatch could take its set/globParts + // as an option, and then we could at least just use Pattern to test + // for absolute-ness. + // Yet another way, Minimatch could take an array of glob strings, and + // a cwd option, and do the right thing. 
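+ // Illustrative note (not upstream glob source): as in the CJS build, + // patterns ending in /** are also stored as *Children matchers so that + // childrenIgnored() can prune whole subtrees.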
+ for (const ign of ignored) { + const mm = new Minimatch(ign, mmopts); + for (let i = 0; i < mm.set.length; i++) { + const parsed = mm.set[i]; + const globParts = mm.globParts[i]; + const p = new Pattern(parsed, globParts, 0, platform); + const m = new Minimatch(p.globString(), mmopts); + const children = globParts[globParts.length - 1] === '**'; + const absolute = p.isAbsolute(); + if (absolute) + this.absolute.push(m); + else + this.relative.push(m); + if (children) { + if (absolute) + this.absoluteChildren.push(m); + else + this.relativeChildren.push(m); + } + } + } + } + ignored(p) { + const fullpath = p.fullpath(); + const fullpaths = `${fullpath}/`; + const relative = p.relative() || '.'; + const relatives = `${relative}/`; + for (const m of this.relative) { + if (m.match(relative) || m.match(relatives)) + return true; + } + for (const m of this.absolute) { + if (m.match(fullpath) || m.match(fullpaths)) + return true; + } + return false; + } + childrenIgnored(p) { + const fullpath = p.fullpath() + '/'; + const relative = (p.relative() || '.') + '/'; + for (const m of this.relativeChildren) { + if (m.match(relative)) + return true; + } + for (const m of this.absoluteChildren) { + if (m.match(fullpath)) + return true; + } + return false; + } +} +//# sourceMappingURL=ignore.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.js new file mode 100644 index 0000000000000..7b270117e740a --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/index.js @@ -0,0 +1,56 @@ +import { escape, unescape } from 'minimatch'; +import { Glob } from './glob.js'; +import { hasMagic } from './has-magic.js'; +export function globStreamSync(pattern, options = {}) { + return new Glob(pattern, options).streamSync(); +} +export function globStream(pattern, options = {}) { + return new Glob(pattern, options).stream(); +} +export function globSync(pattern, options = {}) { + return new Glob(pattern, options).walkSync(); +} +async function glob_(pattern, options = {}) { + return new Glob(pattern, options).walk(); +} +export function globIterateSync(pattern, options = {}) { + return new Glob(pattern, options).iterateSync(); +} +export function globIterate(pattern, options = {}) { + return new Glob(pattern, options).iterate(); +} +// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc +export const streamSync = globStreamSync; +export const stream = Object.assign(globStream, { sync: globStreamSync }); +export const iterateSync = globIterateSync; +export const iterate = Object.assign(globIterate, { + sync: globIterateSync, +}); +export const sync = Object.assign(globSync, { + stream: globStreamSync, + iterate: globIterateSync, +}); +/* c8 ignore start */ +export { escape, unescape } from 'minimatch'; +export { Glob } from './glob.js'; +export { hasMagic } from './has-magic.js'; +/* c8 ignore stop */ +export const glob = Object.assign(glob_, { + glob: glob_, + globSync, + sync, + globStream, + stream, + globStreamSync, + streamSync, + globIterate, + iterate, + globIterateSync, + iterateSync, + Glob, + hasMagic, + escape, + unescape, +}); +glob.glob = glob; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/package.json new file mode 100644 index
0000000000000..5cc80943d565b --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/package.json @@ -0,0 +1,4 @@ +{ + "version": "10.3.3", + "type": "module" +} diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.js new file mode 100644 index 0000000000000..60aa415d92fd1 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/pattern.js @@ -0,0 +1,215 @@ +// this is just a very light wrapper around 2 arrays with an offset index +import { GLOBSTAR } from 'minimatch'; +const isPatternList = (pl) => pl.length >= 1; +const isGlobList = (gl) => gl.length >= 1; +/** + * An immutable-ish view on an array of glob parts and their parsed + * results + */ +export class Pattern { + #patternList; + #globList; + #index; + length; + #platform; + #rest; + #globString; + #isDrive; + #isUNC; + #isAbsolute; + #followGlobstar = true; + constructor(patternList, globList, index, platform) { + if (!isPatternList(patternList)) { + throw new TypeError('empty pattern list'); + } + if (!isGlobList(globList)) { + throw new TypeError('empty glob list'); + } + if (globList.length !== patternList.length) { + throw new TypeError('mismatched pattern list and glob list lengths'); + } + this.length = patternList.length; + if (index < 0 || index >= this.length) { + throw new TypeError('index out of range'); + } + this.#patternList = patternList; + this.#globList = globList; + this.#index = index; + this.#platform = platform; + // normalize root entries of absolute patterns on initial creation. + if (this.#index === 0) { + // c: => ['c:/'] + // C:/ => ['C:/'] + // C:/x => ['C:/', 'x'] + // //host/share => ['//host/share/'] + // //host/share/ => ['//host/share/'] + // //host/share/x => ['//host/share/', 'x'] + // /etc => ['/', 'etc'] + // / => ['/'] + if (this.isUNC()) { + // '' / '' / 'host' / 'share' + const [p0, p1, p2, p3, ...prest] = this.#patternList; + const [g0, g1, g2, g3, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = [p0, p1, p2, p3, ''].join('/'); + const g = [g0, g1, g2, g3, ''].join('/'); + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + else if (this.isDrive() || this.isAbsolute()) { + const [p1, ...prest] = this.#patternList; + const [g1, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = p1 + '/'; + const g = g1 + '/'; + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + } + } + /** + * The first entry in the parsed list of patterns + */ + pattern() { + return this.#patternList[this.#index]; + } + /** + * true if pattern() returns a string + */ + isString() { + return typeof this.#patternList[this.#index] === 'string'; + } + /** + * true if pattern() returns GLOBSTAR + */ + isGlobstar() { + return this.#patternList[this.#index] === GLOBSTAR; + } + /** + * true if pattern() returns a regexp + */ + isRegExp() { + return this.#patternList[this.#index] instanceof RegExp; + } + /** + * The /-joined set of glob parts that make up this pattern + */ + globString() { + return (this.#globString = + this.#globString || + (this.#index === 0 + ? this.isAbsolute() + ?
this.#globList[0] + this.#globList.slice(1).join('/') + : this.#globList.join('/') + : this.#globList.slice(this.#index).join('/'))); + } + /** + * true if there are more pattern parts after this one + */ + hasMore() { + return this.length > this.#index + 1; + } + /** + * The rest of the pattern after this part, or null if this is the end + */ + rest() { + if (this.#rest !== undefined) + return this.#rest; + if (!this.hasMore()) + return (this.#rest = null); + this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform); + this.#rest.#isAbsolute = this.#isAbsolute; + this.#rest.#isUNC = this.#isUNC; + this.#rest.#isDrive = this.#isDrive; + return this.#rest; + } + /** + * true if the pattern represents a //unc/path/ on windows + */ + isUNC() { + const pl = this.#patternList; + return this.#isUNC !== undefined + ? this.#isUNC + : (this.#isUNC = + this.#platform === 'win32' && + this.#index === 0 && + pl[0] === '' && + pl[1] === '' && + typeof pl[2] === 'string' && + !!pl[2] && + typeof pl[3] === 'string' && + !!pl[3]); + } + // pattern like C:/... + // split = ['C:', ...] + // XXX: would be nice to handle patterns like `c:*` to test the cwd + // in c: for *, but I don't know of a way to even figure out what that + // cwd is without actually chdir'ing into it? + /** + * True if the pattern starts with a drive letter on Windows + */ + isDrive() { + const pl = this.#patternList; + return this.#isDrive !== undefined + ? this.#isDrive + : (this.#isDrive = + this.#platform === 'win32' && + this.#index === 0 && + this.length > 1 && + typeof pl[0] === 'string' && + /^[a-z]:$/i.test(pl[0])); + } + // pattern = '/' or '/...' or '/x/...' + // split = ['', ''] or ['', ...] or ['', 'x', ...] + // Drive and UNC both considered absolute on windows + /** + * True if the pattern is rooted on an absolute path + */ + isAbsolute() { + const pl = this.#patternList; + return this.#isAbsolute !== undefined + ? this.#isAbsolute + : (this.#isAbsolute = + (pl[0] === '' && pl.length > 1) || + this.isDrive() || + this.isUNC()); + } + /** + * consume the root of the pattern, and return it + */ + root() { + const p = this.#patternList[0]; + return typeof p === 'string' && this.isAbsolute() && this.#index === 0 + ? p + : ''; + } + /** + * Check to see if the current globstar pattern is allowed to follow + * a symbolic link. 
+ */ + checkFollowGlobstar() { + return !(this.#index === 0 || + !this.isGlobstar() || + !this.#followGlobstar); + } + /** + * Mark that the current globstar pattern is following a symbolic link + */ + markFollowGlobstar() { + if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar) + return false; + this.#followGlobstar = false; + return true; + } +} +//# sourceMappingURL=pattern.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.js new file mode 100644 index 0000000000000..dd2228ad6761a --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/processor.js @@ -0,0 +1,302 @@ +// synchronous utility for filtering entries and calculating subwalks +import { GLOBSTAR } from 'minimatch'; +/** + * A cache of which patterns have been processed for a given Path + */ +export class HasWalkedCache { + store; + constructor(store = new Map()) { + this.store = store; + } + copy() { + return new HasWalkedCache(new Map(this.store)); + } + hasWalked(target, pattern) { + return this.store.get(target.fullpath())?.has(pattern.globString()); + } + storeWalked(target, pattern) { + const fullpath = target.fullpath(); + const cached = this.store.get(fullpath); + if (cached) + cached.add(pattern.globString()); + else + this.store.set(fullpath, new Set([pattern.globString()])); + } +} +/** + * A record of which paths have been matched in a given walk step, + * and whether they only are considered a match if they are a directory, + * and whether their absolute or relative path should be returned. + */ +export class MatchRecord { + store = new Map(); + add(target, absolute, ifDir) { + const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0); + const current = this.store.get(target); + this.store.set(target, current === undefined ? n : n & current); + } + // match, absolute, ifdir + entries() { + return [...this.store.entries()].map(([path, n]) => [ + path, + !!(n & 2), + !!(n & 1), + ]); + } +} +/** + * A collection of patterns that must be processed in a subsequent step + * for a given path. + */ +export class SubWalks { + store = new Map(); + add(target, pattern) { + if (!target.canReaddir()) { + return; + } + const subs = this.store.get(target); + if (subs) { + if (!subs.find(p => p.globString() === pattern.globString())) { + subs.push(pattern); + } + } + else + this.store.set(target, [pattern]); + } + get(target) { + const subs = this.store.get(target); + /* c8 ignore start */ + if (!subs) { + throw new Error('attempting to walk unknown path'); + } + /* c8 ignore stop */ + return subs; + } + entries() { + return this.keys().map(k => [k, this.store.get(k)]); + } + keys() { + return [...this.store.keys()].filter(t => t.canReaddir()); + } +} +/** + * The class that processes patterns for a given path. + * + * Handles child entry filtering, and determining whether a path's + * directory contents must be read. + */ +export class Processor { + hasWalkedCache; + matches = new MatchRecord(); + subwalks = new SubWalks(); + patterns; + follow; + dot; + opts; + constructor(opts, hasWalkedCache) { + this.opts = opts; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.hasWalkedCache = hasWalkedCache + ? 
hasWalkedCache.copy() + : new HasWalkedCache(); + } + processPatterns(target, patterns) { + this.patterns = patterns; + const processingSet = patterns.map(p => [target, p]); + // map of paths to the magic-starting subwalks they need to walk + // first item in patterns is the filter + for (let [t, pattern] of processingSet) { + this.hasWalkedCache.storeWalked(t, pattern); + const root = pattern.root(); + const absolute = pattern.isAbsolute() && this.opts.absolute !== false; + // start absolute patterns at root + if (root) { + t = t.resolve(root === '/' && this.opts.root !== undefined + ? this.opts.root + : root); + const rest = pattern.rest(); + if (!rest) { + this.matches.add(t, true, false); + continue; + } + else { + pattern = rest; + } + } + if (t.isENOENT()) + continue; + let p; + let rest; + let changed = false; + while (typeof (p = pattern.pattern()) === 'string' && + (rest = pattern.rest())) { + const c = t.resolve(p); + // we can be reasonably sure that .. is a readable dir + if (c.isUnknown() && p !== '..') + break; + t = c; + pattern = rest; + changed = true; + } + p = pattern.pattern(); + rest = pattern.rest(); + if (changed) { + if (this.hasWalkedCache.hasWalked(t, pattern)) + continue; + this.hasWalkedCache.storeWalked(t, pattern); + } + // now we have either a final string for a known entry, + // more strings for an unknown entry, + // or a pattern starting with magic, mounted on t. + if (typeof p === 'string') { + // must be final entry + if (!rest) { + const ifDir = p === '..' || p === '' || p === '.'; + this.matches.add(t.resolve(p), absolute, ifDir); + } + else { + this.subwalks.add(t, pattern); + } + continue; + } + else if (p === GLOBSTAR) { + // if no rest, match and subwalk pattern + // if rest, process rest and subwalk pattern + // if it's a symlink, but we didn't get here by way of a + // globstar match (meaning it's the first time THIS globstar + // has traversed a symlink), then we follow it. Otherwise, stop. + if (!t.isSymbolicLink() || + this.follow || + pattern.checkFollowGlobstar()) { + this.subwalks.add(t, pattern); + } + const rp = rest?.pattern(); + const rrest = rest?.rest(); + if (!rest || ((rp === '' || rp === '.') && !rrest)) { + // only HAS to be a dir if it ends in **/ or **/. + // but ending in ** will match files as well. + this.matches.add(t, absolute, rp === '' || rp === '.'); + } + else { + if (rp === '..') { + // this would mean you're matching **/.. at the fs root, + // and no thanks, I'm not gonna test that specific case. 
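+ // Illustrative note (not upstream glob source): at the filesystem root + // there is no parent to step up to, so t.parent falls back to t below.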
+ /* c8 ignore start */ + const tp = t.parent || t; + /* c8 ignore stop */ + if (!rrest) + this.matches.add(tp, absolute, true); + else if (!this.hasWalkedCache.hasWalked(tp, rrest)) { + this.subwalks.add(tp, rrest); + } + } + } + } + else if (p instanceof RegExp) { + this.subwalks.add(t, pattern); + } + } + return this; + } + subwalkTargets() { + return this.subwalks.keys(); + } + child() { + return new Processor(this.opts, this.hasWalkedCache); + } + // return a new Processor containing the subwalks for each + // child entry, and a set of matches, and + // a hasWalkedCache that's a copy of this one + // then we're going to call + filterEntries(parent, entries) { + const patterns = this.subwalks.get(parent); + // put matches and entry walks into the results processor + const results = this.child(); + for (const e of entries) { + for (const pattern of patterns) { + const absolute = pattern.isAbsolute(); + const p = pattern.pattern(); + const rest = pattern.rest(); + if (p === GLOBSTAR) { + results.testGlobstar(e, pattern, rest, absolute); + } + else if (p instanceof RegExp) { + results.testRegExp(e, p, rest, absolute); + } + else { + results.testString(e, p, rest, absolute); + } + } + } + return results; + } + testGlobstar(e, pattern, rest, absolute) { + if (this.dot || !e.name.startsWith('.')) { + if (!pattern.hasMore()) { + this.matches.add(e, absolute, false); + } + if (e.canReaddir()) { + // if we're in follow mode or it's not a symlink, just keep + // testing the same pattern. If there's more after the globstar, + // then this symlink consumes the globstar. If not, then we can + // follow at most ONE symlink along the way, so we mark it, which + // also checks to ensure that it wasn't already marked. + if (this.follow || !e.isSymbolicLink()) { + this.subwalks.add(e, pattern); + } + else if (e.isSymbolicLink()) { + if (rest && pattern.checkFollowGlobstar()) { + this.subwalks.add(e, rest); + } + else if (pattern.markFollowGlobstar()) { + this.subwalks.add(e, pattern); + } + } + } + } + // if the NEXT thing matches this entry, then also add + // the rest. + if (rest) { + const rp = rest.pattern(); + if (typeof rp === 'string' && + // dots and empty were handled already + rp !== '..' && + rp !== '' && + rp !== '.') { + this.testString(e, rp, rest.rest(), absolute); + } + else if (rp === '..') { + /* c8 ignore start */ + const ep = e.parent || e; + /* c8 ignore stop */ + this.subwalks.add(ep, rest); + } + else if (rp instanceof RegExp) { + this.testRegExp(e, rp, rest.rest(), absolute); + } + } + } + testRegExp(e, p, rest, absolute) { + if (!p.test(e.name)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } + testString(e, p, rest, absolute) { + // should never happen? + if (!e.isNamed(p)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } +} +//# sourceMappingURL=processor.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.js b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.js new file mode 100644 index 0000000000000..6f3358b0c39a3 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/dist/mjs/walker.js @@ -0,0 +1,352 @@ +/** + * Single-use utility classes to provide functionality to the {@link Glob} + * methods. 
+ * + * @module + */ +import { Minipass } from 'minipass'; +import { Ignore } from './ignore.js'; +import { Processor } from './processor.js'; +const makeIgnore = (ignore, opts) => typeof ignore === 'string' + ? new Ignore([ignore], opts) + : Array.isArray(ignore) + ? new Ignore(ignore, opts) + : ignore; +/** + * basic walking utilities that all the glob walker types use + */ +export class GlobUtil { + path; + patterns; + opts; + seen = new Set(); + paused = false; + aborted = false; + #onResume = []; + #ignore; + #sep; + signal; + maxDepth; + constructor(patterns, path, opts) { + this.patterns = patterns; + this.path = path; + this.opts = opts; + this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/'; + if (opts.ignore) { + this.#ignore = makeIgnore(opts.ignore, opts); + } + // ignore, always set with maxDepth, but it's optional on the + // GlobOptions type + /* c8 ignore start */ + this.maxDepth = opts.maxDepth || Infinity; + /* c8 ignore stop */ + if (opts.signal) { + this.signal = opts.signal; + this.signal.addEventListener('abort', () => { + this.#onResume.length = 0; + }); + } + } + #ignored(path) { + return this.seen.has(path) || !!this.#ignore?.ignored?.(path); + } + #childrenIgnored(path) { + return !!this.#ignore?.childrenIgnored?.(path); + } + // backpressure mechanism + pause() { + this.paused = true; + } + resume() { + /* c8 ignore start */ + if (this.signal?.aborted) + return; + /* c8 ignore stop */ + this.paused = false; + let fn = undefined; + while (!this.paused && (fn = this.#onResume.shift())) { + fn(); + } + } + onResume(fn) { + if (this.signal?.aborted) + return; + /* c8 ignore start */ + if (!this.paused) { + fn(); + } + else { + /* c8 ignore stop */ + this.#onResume.push(fn); + } + } + // do the requisite realpath/stat checking, and return the path + // to add or undefined to filter it out. + async matchCheck(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || (await e.realpath()); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + return this.matchCheckTest(needStat ? await e.lstat() : e, ifDir); + } + matchCheckTest(e, ifDir) { + return e && + (this.maxDepth === Infinity || e.depth() <= this.maxDepth) && + (!ifDir || e.canReaddir()) && + (!this.opts.nodir || !e.isDirectory()) && + !this.#ignored(e) + ? e + : undefined; + } + matchCheckSync(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || e.realpathSync(); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + return this.matchCheckTest(needStat ? e.lstatSync() : e, ifDir); + } + matchFinish(e, absolute) { + if (this.#ignored(e)) + return; + const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute; + this.seen.add(e); + const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''; + // ok, we have what we need! + if (this.opts.withFileTypes) { + this.matchEmit(e); + } + else if (abs) { + const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath(); + this.matchEmit(abs + mark); + } + else { + const rel = this.opts.posix ? e.relativePosix() : e.relative(); + const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) + ? '.' + this.#sep + : ''; + this.matchEmit(!rel ? '.' 
+ mark : pre + rel + mark); + } + } + async match(e, absolute, ifDir) { + const p = await this.matchCheck(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + matchSync(e, absolute, ifDir) { + const p = this.matchCheckSync(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + walkCB(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2(target, patterns, new Processor(this.opts), cb); + } + walkCB2(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const childrenCached = t.readdirCached(); + if (t.calledReaddir()) + this.walkCB3(t, childrenCached, processor, next); + else { + t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true); + } + } + next(); + } + walkCB3(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2(target, patterns, processor.child(), next); + } + next(); + } + walkCBSync(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2Sync(target, patterns, new Processor(this.opts), cb); + } + walkCB2Sync(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const children = t.readdirSync(); + this.walkCB3Sync(t, children, processor, next); + } + next(); + } + walkCB3Sync(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2Sync(target, patterns, processor.child(), next); + } + next(); + } +} +export class GlobWalker extends GlobUtil { + matches; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.matches = new Set(); + } + matchEmit(e) { + this.matches.add(e); + } + async walk() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + await this.path.lstat(); + } + await new Promise((res, rej) => { + this.walkCB(this.path, this.patterns, () => { + if (this.signal?.aborted) { + rej(this.signal.reason); + } + else { + res(this.matches); + } + }); + }); + return this.matches; + } + walkSync() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + // nothing for the callback to do, because this never pauses + this.walkCBSync(this.path, this.patterns, () => { + if (this.signal?.aborted) + throw this.signal.reason; + }); + return this.matches; + } +} +export class GlobStream extends GlobUtil { + results; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.results = new Minipass({ + signal: this.signal, + objectMode: true, + }); + this.results.on('drain', () => this.resume()); + this.results.on('resume', () => this.resume()); + } + matchEmit(e) { + this.results.write(e); + if (!this.results.flowing) + this.pause(); + } + stream() { + const target = this.path; + if (target.isUnknown()) { + target.lstat().then(() => { + this.walkCB(target, this.patterns, () => this.results.end()); + }); + } + else { + this.walkCB(target, this.patterns, () => this.results.end()); + } + return this.results; + } + streamSync() { + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + this.walkCBSync(this.path, this.patterns, () => this.results.end()); + return this.results; + } +} +//# sourceMappingURL=walker.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json new file mode 100644 index 0000000000000..2d25985d2bbb5 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/glob/package.json @@ -0,0 +1,98 @@ +{ + "author": "Isaac Z. 
Schlueter (https://blog.izs.me/)", + "name": "glob", + "description": "the most correct and second fastest glob implementation in JavaScript", + "version": "10.3.3", + "bin": "./dist/cjs/src/bin.js", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "main": "./dist/cjs/src/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/mjs/index.d.ts", + "exports": { + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/src/index.d.ts", + "default": "./dist/cjs/src/index.js" + } + } + }, + "files": [ + "dist" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . --loglevel warn", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", + "prepublish": "npm run benchclean", + "profclean": "rm -f v8.log profile.txt", + "test-regen": "npm run profclean && TEST_REGEN=1 node --no-warnings --loader ts-node/esm test/00-setup.ts", + "prebench": "npm run prepare", + "bench": "bash benchmark.sh", + "preprof": "npm run prepare", + "prof": "bash prof.sh", + "benchclean": "node benchclean.js" + }, + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^2.0.3", + "minimatch": "^9.0.1", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0", + "path-scurry": "^1.10.1" + }, + "devDependencies": { + "@types/node": "^20.3.2", + "@types/tap": "^15.0.7", + "c8": "^7.12.0", + "memfs": "^3.4.13", + "mkdirp": "^2.1.4", + "prettier": "^2.8.3", + "rimraf": "^4.1.3", + "tap": "^16.3.4", + "ts-node": "^10.9.1", + "typedoc": "^0.23.24", + "typescript": "^4.9.4" + }, + "tap": { + "before": "test/00-setup.ts", + "coverage": false, + "node-arg": [ + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false + }, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } +} diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE new file mode 100644 index 0000000000000..1493534e60dce --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
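The `exports` map in the nested glob `package.json` above is what makes this copy dual-format: Node resolves `require('glob')` to `./dist/cjs/src/index.js` and `import` to `./dist/mjs/index.js`, each paired with its own `.d.ts`, while the `bin` entry always points at the CommonJS build. A minimal consumer-side sketch of the two resolution paths (illustrative only, not part of the patch; `glob` and `globSync` are real named exports of glob v10, but the pattern and options used here are arbitrary):

```js
// CommonJS: resolved via exports["."].require -> ./dist/cjs/src/index.js
const { globSync } = require('glob');
const jsFiles = globSync('src/**/*.js', { ignore: 'node_modules/**' });

// ESM: resolved via exports["."].import -> ./dist/mjs/index.js
// import { glob } from 'glob';
// const jsFiles = await glob('src/**/*.js', { ignore: 'node_modules/**' });
```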
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/assert-valid-pattern.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/assert-valid-pattern.js new file mode 100644 index 0000000000000..5fc86bbd0116c --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/assert-valid-pattern.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.assertValidPattern = void 0; +const MAX_PATTERN_LENGTH = 1024 * 64; +const assertValidPattern = (pattern) => { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern'); + } + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long'); + } +}; +exports.assertValidPattern = assertValidPattern; +//# sourceMappingURL=assert-valid-pattern.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/ast.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/ast.js new file mode 100644 index 0000000000000..a98ae79b503b1 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/ast.js @@ -0,0 +1,589 @@ +"use strict"; +// parse a single path portion +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AST = void 0; +const brace_expressions_js_1 = require("./brace-expressions.js"); +const unescape_js_1 = require("./unescape.js"); +const types = new Set(['!', '?', '+', '*', '@']); +const isExtglobType = (c) => types.has(c); +// Patterns that get prepended to bind to the start of either the +// entire string, or just a single path portion, to prevent dots +// and/or traversal patterns, when needed. +// Exts don't need the ^ or / bit, because the root binds that already. +const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))'; +const startNoDot = '(?!\\.)'; +// characters that indicate a start of pattern needs the "no dots" bit, +// because a dot *might* be matched. ( is not in the list, because in +// the case of a child extglob, it will handle the prevention itself. +const addPatternStart = new Set(['[', '.']); +// cases where traversal is A-OK, no dot prevention needed +const justDots = new Set(['..', '.']); +const reSpecials = new Set('().*{}+?[]^$\\!'); +const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +// any single thing other than / +const qmark = '[^/]'; +// * => any number of characters +const star = qmark + '*?'; +// use + when we need to ensure that *something* matches, because the * is +// the only thing in the path portion. +const starNoEmpty = qmark + '+?'; +// remove the \ chars that we added if we end up doing a nonmagic compare +// const deslash = (s: string) => s.replace(/\\(.)/g, '$1') +class AST { + type; + #root; + #hasMagic; + #uflag = false; + #parts = []; + #parent; + #parentIndex; + #negs; + #filledNegs = false; + #options; + #toString; + // set to true if it's an extglob with no children + // (which really means one child of '') + #emptyExt = false; + constructor(type, parent, options = {}) { + this.type = type; + // extglobs are inherently magical + if (type) + this.#hasMagic = true; + this.#parent = parent; + this.#root = this.#parent ? this.#parent.#root : this; + this.#options = this.#root === this ? options : this.#root.#options; + this.#negs = this.#root === this ? [] : this.#root.#negs; + if (type === '!' 
&& !this.#root.#filledNegs) + this.#negs.push(this); + this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0; + } + get hasMagic() { + /* c8 ignore start */ + if (this.#hasMagic !== undefined) + return this.#hasMagic; + /* c8 ignore stop */ + for (const p of this.#parts) { + if (typeof p === 'string') + continue; + if (p.type || p.hasMagic) + return (this.#hasMagic = true); + } + // note: will be undefined until we generate the regexp src and find out + return this.#hasMagic; + } + // reconstructs the pattern + toString() { + if (this.#toString !== undefined) + return this.#toString; + if (!this.type) { + return (this.#toString = this.#parts.map(p => String(p)).join('')); + } + else { + return (this.#toString = + this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')'); + } + } + #fillNegs() { + /* c8 ignore start */ + if (this !== this.#root) + throw new Error('should only call on root'); + if (this.#filledNegs) + return this; + /* c8 ignore stop */ + // call toString() once to fill this out + this.toString(); + this.#filledNegs = true; + let n; + while ((n = this.#negs.pop())) { + if (n.type !== '!') + continue; + // walk up the tree, appending everthing that comes AFTER parentIndex + let p = n; + let pp = p.#parent; + while (pp) { + for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) { + for (const part of n.#parts) { + /* c8 ignore start */ + if (typeof part === 'string') { + throw new Error('string part in extglob AST??'); + } + /* c8 ignore stop */ + part.copyIn(pp.#parts[i]); + } + } + p = pp; + pp = p.#parent; + } + } + return this; + } + push(...parts) { + for (const p of parts) { + if (p === '') + continue; + /* c8 ignore start */ + if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) { + throw new Error('invalid part: ' + p); + } + /* c8 ignore stop */ + this.#parts.push(p); + } + } + toJSON() { + const ret = this.type === null + ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON())) + : [this.type, ...this.#parts.map(p => p.toJSON())]; + if (this.isStart() && !this.type) + ret.unshift([]); + if (this.isEnd() && + (this === this.#root || + (this.#root.#filledNegs && this.#parent?.type === '!'))) { + ret.push({}); + } + return ret; + } + isStart() { + if (this.#root === this) + return true; + // if (this.type) return !!this.#parent?.isStart() + if (!this.#parent?.isStart()) + return false; + if (this.#parentIndex === 0) + return true; + // if everything AHEAD of this is a negation, then it's still the "start" + const p = this.#parent; + for (let i = 0; i < this.#parentIndex; i++) { + const pp = p.#parts[i]; + if (!(pp instanceof AST && pp.type === '!')) { + return false; + } + } + return true; + } + isEnd() { + if (this.#root === this) + return true; + if (this.#parent?.type === '!') + return true; + if (!this.#parent?.isEnd()) + return false; + if (!this.type) + return this.#parent?.isEnd(); + // if not root, it'll always have a parent + /* c8 ignore start */ + const pl = this.#parent ? 
this.#parent.#parts.length : 0; + /* c8 ignore stop */ + return this.#parentIndex === pl - 1; + } + copyIn(part) { + if (typeof part === 'string') + this.push(part); + else + this.push(part.clone(this)); + } + clone(parent) { + const c = new AST(this.type, parent); + for (const p of this.#parts) { + c.copyIn(p); + } + return c; + } + static #parseAST(str, ast, pos, opt) { + let escaping = false; + let inBrace = false; + let braceStart = -1; + let braceNeg = false; + if (ast.type === null) { + // outside of a extglob, append until we find a start + let i = pos; + let acc = ''; + while (i < str.length) { + const c = str.charAt(i++); + // still accumulate escapes at this point, but we do ignore + // starts that are escaped + if (escaping || c === '\\') { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === '^' || c === '!') { + braceNeg = true; + } + } + else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc += c; + continue; + } + else if (c === '[') { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; + } + if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') { + ast.push(acc); + acc = ''; + const ext = new AST(c, ast); + i = AST.#parseAST(str, ext, i, opt); + ast.push(ext); + continue; + } + acc += c; + } + ast.push(acc); + return i; + } + // some kind of extglob, pos is at the ( + // find the next | or ) + let i = pos + 1; + let part = new AST(null, ast); + const parts = []; + let acc = ''; + while (i < str.length) { + const c = str.charAt(i++); + // still accumulate escapes at this point, but we do ignore + // starts that are escaped + if (escaping || c === '\\') { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === '^' || c === '!') { + braceNeg = true; + } + } + else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc += c; + continue; + } + else if (c === '[') { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; + } + if (isExtglobType(c) && str.charAt(i) === '(') { + part.push(acc); + acc = ''; + const ext = new AST(c, part); + part.push(ext); + i = AST.#parseAST(str, ext, i, opt); + continue; + } + if (c === '|') { + part.push(acc); + acc = ''; + parts.push(part); + part = new AST(null, ast); + continue; + } + if (c === ')') { + if (acc === '' && ast.#parts.length === 0) { + ast.#emptyExt = true; + } + part.push(acc); + acc = ''; + ast.push(...parts, part); + return i; + } + acc += c; + } + // unfinished extglob + // if we got here, it was a malformed extglob! not an extglob, but + // maybe something else in there. + ast.type = null; + ast.#hasMagic = undefined; + ast.#parts = [str.substring(pos - 1)]; + return i; + } + static fromGlob(pattern, options = {}) { + const ast = new AST(null, undefined, options); + AST.#parseAST(pattern, ast, 0, options); + return ast; + } + // returns the regular expression if there's magic, or the unescaped + // string if not. + toMMPattern() { + // should only be called on root + /* c8 ignore start */ + if (this !== this.#root) + return this.#root.toMMPattern(); + /* c8 ignore stop */ + const glob = this.toString(); + const [re, body, hasMagic, uflag] = this.toRegExpSource(); + // if we're in nocase mode, and not nocaseMagicOnly, then we do + // still need a regular expression if we have to case-insensitively + // match capital/lowercase characters. 
+ const anyMagic = hasMagic || + this.#hasMagic || + (this.#options.nocase && + !this.#options.nocaseMagicOnly && + glob.toUpperCase() !== glob.toLowerCase()); + if (!anyMagic) { + return body; + } + const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : ''); + return Object.assign(new RegExp(`^${re}$`, flags), { + _src: re, + _glob: glob, + }); + } + // returns the string match, the regexp source, whether there's magic + // in the regexp (so a regular expression is required) and whether or + // not the uflag is needed for the regular expression (for posix classes) + // TODO: instead of injecting the start/end at this point, just return + // the BODY of the regexp, along with the start/end portions suitable + // for binding the start/end in either a joined full-path makeRe context + // (where we bind to (^|/), or a standalone matchPart context (where + // we bind to ^, and not /). Otherwise slashes get duped! + // + // In part-matching mode, the start is: + // - if not isStart: nothing + // - if traversal possible, but not allowed: ^(?!\.\.?$) + // - if dots allowed or not possible: ^ + // - if dots possible and not allowed: ^(?!\.) + // end is: + // - if not isEnd(): nothing + // - else: $ + // + // In full-path matching mode, we put the slash at the START of the + // pattern, so start is: + // - if first pattern: same as part-matching mode + // - if not isStart(): nothing + // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/)) + // - if dots allowed or not possible: / + // - if dots possible and not allowed: /(?!\.) + // end is: + // - if last pattern, same as part-matching mode + // - else nothing + // + // Always put the (?:$|/) on negated tails, though, because that has to be + // there to bind the end of the negated pattern portion, and it's easier to + // just stick it in now rather than try to inject it later in the middle of + // the pattern. + // + // We can just always return the same end, and leave it up to the caller + // to know whether it's going to be used joined or in parts. + // And, if the start is adjusted slightly, can do the same there: + // - if not isStart: nothing + // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$) + // - if dots allowed or not possible: (?:/|^) + // - if dots possible and not allowed: (?:/|^)(?!\.) + // + // But it's better to have a simpler binding without a conditional, for + // performance, so probably better to return both start options. + // + // Then the caller just ignores the end if it's not the first pattern, + // and the start always gets applied. + // + // But that's always going to be $ if it's the ending pattern, or nothing, + // so the caller can just attach $ at the end of the pattern when building. + // + // So the todo is: + // - better detect what kind of start is needed + // - return both flavors of starting pattern + // - attach $ at the end of the pattern when creating the actual RegExp + // + // Ah, but wait, no, that all only applies to the root when the first pattern + // is not an extglob. If the first pattern IS an extglob, then we need all + // that dot prevention biz to live in the extglob portions, because eg + // +(*|.x*) can match .xy but not .yx. + // + // So, return the two flavors if it's #root and the first child is not an + // AST, otherwise leave it to the child AST to handle it, and there, + // use the (?:^|/) style of start binding. + // + // Even simplified further: + // - Since the start for a join is eg /(?!\.) 
and the start for a part + // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root + // or start or whatever) and prepend ^ or / at the Regexp construction. + toRegExpSource(allowDot) { + const dot = allowDot ?? !!this.#options.dot; + if (this.#root === this) + this.#fillNegs(); + if (!this.type) { + const noEmpty = this.isStart() && this.isEnd(); + const src = this.#parts + .map(p => { + const [re, _, hasMagic, uflag] = typeof p === 'string' + ? AST.#parseGlob(p, this.#hasMagic, noEmpty) + : p.toRegExpSource(allowDot); + this.#hasMagic = this.#hasMagic || hasMagic; + this.#uflag = this.#uflag || uflag; + return re; + }) + .join(''); + let start = ''; + if (this.isStart()) { + if (typeof this.#parts[0] === 'string') { + // this is the string that will match the start of the pattern, + // so we need to protect against dots and such. + // '.' and '..' cannot match unless the pattern is that exactly, + // even if it starts with . or dot:true is set. + const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]); + if (!dotTravAllowed) { + const aps = addPatternStart; + // check if we have a possibility of matching . or .., + // and prevent that. + const needNoTrav = + // dots are allowed, and the pattern starts with [ or . + (dot && aps.has(src.charAt(0))) || + // the pattern starts with \., and then [ or . + (src.startsWith('\\.') && aps.has(src.charAt(2))) || + // the pattern starts with \.\., and then [ or . + (src.startsWith('\\.\\.') && aps.has(src.charAt(4))); + // no need to prevent dots if it can't match a dot, or if a + // sub-pattern will be preventing it anyway. + const needNoDot = !dot && !allowDot && aps.has(src.charAt(0)); + start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : ''; + } + } + } + // append the "end of path portion" pattern to negation tails + let end = ''; + if (this.isEnd() && + this.#root.#filledNegs && + this.#parent?.type === '!') { + end = '(?:$|\\/)'; + } + const final = start + src + end; + return [ + final, + (0, unescape_js_1.unescape)(src), + (this.#hasMagic = !!this.#hasMagic), + this.#uflag, + ]; + } + // We need to calculate the body *twice* if it's a repeat pattern + // at the start, once in nodot mode, then again in dot mode, so a + // pattern like *(?) can match 'x.y' + const repeated = this.type === '*' || this.type === '+'; + // some kind of extglob + const start = this.type === '!' ? '(?:(?!(?:' : '(?:'; + let body = this.#partsToRegExp(dot); + if (this.isStart() && this.isEnd() && !body && this.type !== '!') { + // invalid extglob, has to at least be *something* present, if it's + // the entire path portion. + const s = this.toString(); + this.#parts = [s]; + this.type = null; + this.#hasMagic = undefined; + return [s, (0, unescape_js_1.unescape)(this.toString()), false, false]; + } + // XXX abstract out this map method + let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot + ? '' + : this.#partsToRegExp(true); + if (bodyDotAllowed === body) { + bodyDotAllowed = ''; + } + if (bodyDotAllowed) { + body = `(?:${body})(?:${bodyDotAllowed})*?`; + } + // an empty !() is exactly equivalent to a starNoEmpty + let final = ''; + if (this.type === '!' && this.#emptyExt) { + final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty; + } + else { + const close = this.type === '!' + ? // !() must match something,but !(x) can match '' + '))' + + (this.isStart() && !dot && !allowDot ? startNoDot : '') + + star + + ')' + : this.type === '@' + ? ')' + : this.type === '?' + ? ')?' 
+ : this.type === '+' && bodyDotAllowed + ? ')' + : this.type === '*' && bodyDotAllowed + ? `)?` + : `)${this.type}`; + final = start + body + close; + } + return [ + final, + (0, unescape_js_1.unescape)(body), + (this.#hasMagic = !!this.#hasMagic), + this.#uflag, + ]; + } + #partsToRegExp(dot) { + return this.#parts + .map(p => { + // extglob ASTs should only contain parent ASTs + /* c8 ignore start */ + if (typeof p === 'string') { + throw new Error('string type in extglob ast??'); + } + /* c8 ignore stop */ + // can ignore hasMagic, because extglobs are already always magic + const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot); + this.#uflag = this.#uflag || uflag; + return re; + }) + .filter(p => !(this.isStart() && this.isEnd()) || !!p) + .join('|'); + } + static #parseGlob(glob, hasMagic, noEmpty = false) { + let escaping = false; + let re = ''; + let uflag = false; + for (let i = 0; i < glob.length; i++) { + const c = glob.charAt(i); + if (escaping) { + escaping = false; + re += (reSpecials.has(c) ? '\\' : '') + c; + continue; + } + if (c === '\\') { + if (i === glob.length - 1) { + re += '\\\\'; + } + else { + escaping = true; + } + continue; + } + if (c === '[') { + const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(glob, i); + if (consumed) { + re += src; + uflag = uflag || needUflag; + i += consumed - 1; + hasMagic = hasMagic || magic; + continue; + } + } + if (c === '*') { + if (noEmpty && glob === '*') + re += starNoEmpty; + else + re += star; + hasMagic = true; + continue; + } + if (c === '?') { + re += qmark; + hasMagic = true; + continue; + } + re += regExpEscape(c); + } + return [re, (0, unescape_js_1.unescape)(glob), !!hasMagic, uflag]; + } +} +exports.AST = AST; +//# sourceMappingURL=ast.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/brace-expressions.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/brace-expressions.js new file mode 100644 index 0000000000000..0e13eefc4cfee --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/brace-expressions.js @@ -0,0 +1,152 @@ +"use strict"; +// translate the various posix character classes into unicode properties +// this works across all unicode locales +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseClass = void 0; +// { : [, /u flag required, negated] +const posixClasses = { + '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true], + '[:alpha:]': ['\\p{L}\\p{Nl}', true], + '[:ascii:]': ['\\x' + '00-\\x' + '7f', false], + '[:blank:]': ['\\p{Zs}\\t', true], + '[:cntrl:]': ['\\p{Cc}', true], + '[:digit:]': ['\\p{Nd}', true], + '[:graph:]': ['\\p{Z}\\p{C}', true, true], + '[:lower:]': ['\\p{Ll}', true], + '[:print:]': ['\\p{C}', true], + '[:punct:]': ['\\p{P}', true], + '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true], + '[:upper:]': ['\\p{Lu}', true], + '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true], + '[:xdigit:]': ['A-Fa-f0-9', false], +}; +// only need to escape a few things inside of brace expressions +// escapes: [ \ ] - +const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&'); +// escape all regexp magic characters +const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +// everything has already been escaped, we just have to join +const rangesToString = (ranges) => ranges.join(''); +// takes a glob string at a posix brace expression, and returns +// an equivalent regular expression source, and boolean 
indicating +// whether the /u flag needs to be applied, and the number of chars +// consumed to parse the character class. +// This also removes out of order ranges, and returns ($.) if the +// entire class just no good. +const parseClass = (glob, position) => { + const pos = position; + /* c8 ignore start */ + if (glob.charAt(pos) !== '[') { + throw new Error('not in a brace expression'); + } + /* c8 ignore stop */ + const ranges = []; + const negs = []; + let i = pos + 1; + let sawStart = false; + let uflag = false; + let escaping = false; + let negate = false; + let endPos = pos; + let rangeStart = ''; + WHILE: while (i < glob.length) { + const c = glob.charAt(i); + if ((c === '!' || c === '^') && i === pos + 1) { + negate = true; + i++; + continue; + } + if (c === ']' && sawStart && !escaping) { + endPos = i + 1; + break; + } + sawStart = true; + if (c === '\\') { + if (!escaping) { + escaping = true; + i++; + continue; + } + // escaped \ char, fall through and treat like normal char + } + if (c === '[' && !escaping) { + // either a posix class, a collation equivalent, or just a [ + for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) { + if (glob.startsWith(cls, i)) { + // invalid, [a-[] is fine, but not [a-[:alpha]] + if (rangeStart) { + return ['$.', false, glob.length - pos, true]; + } + i += cls.length; + if (neg) + negs.push(unip); + else + ranges.push(unip); + uflag = uflag || u; + continue WHILE; + } + } + } + // now it's just a normal character, effectively + escaping = false; + if (rangeStart) { + // throw this range away if it's not valid, but others + // can still match. + if (c > rangeStart) { + ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c)); + } + else if (c === rangeStart) { + ranges.push(braceEscape(c)); + } + rangeStart = ''; + i++; + continue; + } + // now might be the start of a range. + // can be either c-d or c-] or c] or c] at this point + if (glob.startsWith('-]', i + 1)) { + ranges.push(braceEscape(c + '-')); + i += 2; + continue; + } + if (glob.startsWith('-', i + 1)) { + rangeStart = c; + i += 2; + continue; + } + // not the start of a range, just a single character + ranges.push(braceEscape(c)); + i++; + } + if (endPos < i) { + // didn't see the end of the class, not a valid class, + // but might still be valid as a literal match. + return ['', false, 0, false]; + } + // if we got no ranges and no negates, then we have a range that + // cannot possibly match anything, and that poisons the whole glob + if (!ranges.length && !negs.length) { + return ['$.', false, glob.length - pos, true]; + } + // if we got one positive range, and it's a single character, then that's + // not actually a magic pattern, it's just that one literal character. + // we should not treat that as "magic", we should just return the literal + // character. [_] is a perfectly valid way to escape glob magic chars. + if (negs.length === 0 && + ranges.length === 1 && + /^\\?.$/.test(ranges[0]) && + !negate) { + const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0]; + return [regexpEscape(r), false, endPos - pos, false]; + } + const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']'; + const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']'; + const comb = ranges.length && negs.length + ? '(' + sranges + '|' + snegs + ')' + : ranges.length + ? 
sranges + : snegs; + return [comb, uflag, endPos - pos, true]; +}; +exports.parseClass = parseClass; +//# sourceMappingURL=brace-expressions.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/escape.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/escape.js new file mode 100644 index 0000000000000..02a4f8a8e0a58 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/escape.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.escape = void 0; +/** + * Escape all magic characters in a glob pattern. + * + * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape} + * option is used, then characters are escaped by wrapping in `[]`, because + * a magic character wrapped in a character class can only be satisfied by + * that exact character. In this mode, `\` is _not_ escaped, because it is + * not interpreted as a magic character, but instead as a path separator. + */ +const escape = (s, { windowsPathsNoEscape = false, } = {}) => { + // don't need to escape +@! because we escape the parens + // that make those magic, and escaping ! as [!] isn't valid, + // because [!]] is a valid glob class meaning not ']'. + return windowsPathsNoEscape + ? s.replace(/[?*()[\]]/g, '[$&]') + : s.replace(/[?*()[\]\\]/g, '\\$&'); +}; +exports.escape = escape; +//# sourceMappingURL=escape.js.map \ No newline at end of file diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/index.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/index.js new file mode 100644 index 0000000000000..d70e681fef5d7 --- /dev/null +++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/index.js @@ -0,0 +1,1011 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0; +const brace_expansion_1 = __importDefault(require("brace-expansion")); +const assert_valid_pattern_js_1 = require("./assert-valid-pattern.js"); +const ast_js_1 = require("./ast.js"); +const escape_js_1 = require("./escape.js"); +const unescape_js_1 = require("./unescape.js"); +const minimatch = (p, pattern, options = {}) => { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false; + } + return new Minimatch(pattern, options).match(p); +}; +exports.minimatch = minimatch; +// Optimized checking for the most common glob patterns. 
+const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/; +const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext); +const starDotExtTestDot = (ext) => (f) => f.endsWith(ext); +const starDotExtTestNocase = (ext) => { + ext = ext.toLowerCase(); + return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext); +}; +const starDotExtTestNocaseDot = (ext) => { + ext = ext.toLowerCase(); + return (f) => f.toLowerCase().endsWith(ext); +}; +const starDotStarRE = /^\*+\.\*+$/; +const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.'); +const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.'); +const dotStarRE = /^\.\*+$/; +const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.'); +const starRE = /^\*+$/; +const starTest = (f) => f.length !== 0 && !f.startsWith('.'); +const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..'; +const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/; +const qmarksTestNocase = ([$0, ext = '']) => { + const noext = qmarksTestNoExt([$0]); + if (!ext) + return noext; + ext = ext.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext); +}; +const qmarksTestNocaseDot = ([$0, ext = '']) => { + const noext = qmarksTestNoExtDot([$0]); + if (!ext) + return noext; + ext = ext.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext); +}; +const qmarksTestDot = ([$0, ext = '']) => { + const noext = qmarksTestNoExtDot([$0]); + return !ext ? noext : (f) => noext(f) && f.endsWith(ext); +}; +const qmarksTest = ([$0, ext = '']) => { + const noext = qmarksTestNoExt([$0]); + return !ext ? noext : (f) => noext(f) && f.endsWith(ext); +}; +const qmarksTestNoExt = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && !f.startsWith('.'); +}; +const qmarksTestNoExtDot = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && f !== '.' && f !== '..'; +}; +/* c8 ignore start */ +const defaultPlatform = (typeof process === 'object' && process + ? (typeof process.env === 'object' && + process.env && + process.env.__MINIMATCH_TESTING_PLATFORM__) || + process.platform + : 'posix'); +const path = { + win32: { sep: '\\' }, + posix: { sep: '/' }, +}; +/* c8 ignore stop */ +exports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep; +exports.minimatch.sep = exports.sep; +exports.GLOBSTAR = Symbol('globstar **'); +exports.minimatch.GLOBSTAR = exports.GLOBSTAR; +// any single thing other than / +// don't need to escape / when using new RegExp() +const qmark = '[^/]'; +// * => any number of characters +const star = qmark + '*?'; +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?'; +// not a ^ or / followed by a dot, +// followed by anything, any number of times. 
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?'; +const filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options); +exports.filter = filter; +exports.minimatch.filter = exports.filter; +const ext = (a, b = {}) => Object.assign({}, a, b); +const defaults = (def) => { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return exports.minimatch; + } + const orig = exports.minimatch; + const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options)); + return Object.assign(m, { + Minimatch: class Minimatch extends orig.Minimatch { + constructor(pattern, options = {}) { + super(pattern, ext(def, options)); + } + static defaults(options) { + return orig.defaults(ext(def, options)).Minimatch; + } + }, + AST: class AST extends orig.AST { + /* c8 ignore start */ + constructor(type, parent, options = {}) { + super(type, parent, ext(def, options)); + } + /* c8 ignore stop */ + static fromGlob(pattern, options = {}) { + return orig.AST.fromGlob(pattern, ext(def, options)); + } + }, + unescape: (s, options = {}) => orig.unescape(s, ext(def, options)), + escape: (s, options = {}) => orig.escape(s, ext(def, options)), + filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)), + defaults: (options) => orig.defaults(ext(def, options)), + makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)), + braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)), + match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)), + sep: orig.sep, + GLOBSTAR: exports.GLOBSTAR, + }); +}; +exports.defaults = defaults; +exports.minimatch.defaults = exports.defaults; +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +const braceExpand = (pattern, options = {}) => { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. + return [pattern]; + } + return (0, brace_expansion_1.default)(pattern); +}; +exports.braceExpand = braceExpand; +exports.minimatch.braceExpand = exports.braceExpand; +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. 
+const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe(); +exports.makeRe = makeRe; +exports.minimatch.makeRe = exports.makeRe; +const match = (list, pattern, options = {}) => { + const mm = new Minimatch(pattern, options); + list = list.filter(f => mm.match(f)); + if (mm.options.nonull && !list.length) { + list.push(pattern); + } + return list; +}; +exports.match = match; +exports.minimatch.match = exports.match; +// replace stuff like \* with * +const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/; +const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +class Minimatch { + options; + set; + pattern; + windowsPathsNoEscape; + nonegate; + negate; + comment; + empty; + preserveMultipleSlashes; + partial; + globSet; + globParts; + nocase; + isWindows; + platform; + windowsNoMagicRoot; + regexp; + constructor(pattern, options = {}) { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + options = options || {}; + this.options = options; + this.pattern = pattern; + this.platform = options.platform || defaultPlatform; + this.isWindows = this.platform === 'win32'; + this.windowsPathsNoEscape = + !!options.windowsPathsNoEscape || options.allowWindowsEscape === false; + if (this.windowsPathsNoEscape) { + this.pattern = this.pattern.replace(/\\/g, '/'); + } + this.preserveMultipleSlashes = !!options.preserveMultipleSlashes; + this.regexp = null; + this.negate = false; + this.nonegate = !!options.nonegate; + this.comment = false; + this.empty = false; + this.partial = !!options.partial; + this.nocase = !!this.options.nocase; + this.windowsNoMagicRoot = + options.windowsNoMagicRoot !== undefined + ? options.windowsNoMagicRoot + : !!(this.isWindows && this.nocase); + this.globSet = []; + this.globParts = []; + this.set = []; + // make the set of regexps etc. + this.make(); + } + hasMagic() { + if (this.options.magicalBraces && this.set.length > 1) { + return true; + } + for (const pattern of this.set) { + for (const part of pattern) { + if (typeof part !== 'string') + return true; + } + } + return false; + } + debug(..._) { } + make() { + const pattern = this.pattern; + const options = this.options; + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true; + return; + } + if (!pattern) { + this.empty = true; + return; + } + // step 1: figure out negation, etc. + this.parseNegate(); + // step 2: expand braces + this.globSet = [...new Set(this.braceExpand())]; + if (options.debug) { + this.debug = (...args) => console.error(...args); + } + this.debug(this.pattern, this.globSet); + // step 3: now we have a set, so turn each one into a series of + // path-portion matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + // + // First, we preprocess to make the glob pattern sets a bit simpler + // and deduped. There are some perf-killing patterns that can cause + // problems with a glob walk, but we can simplify them down a bit. + const rawGlobParts = this.globSet.map(s => this.slashSplit(s)); + this.globParts = this.preprocess(rawGlobParts); + this.debug(this.pattern, this.globParts); + // glob --> regexps + let set = this.globParts.map((s, _, __) => { + if (this.isWindows && this.windowsNoMagicRoot) { + // check if it's a drive or unc path. + const isUNC = s[0] === '' && + s[1] === '' && + (s[2] === '?' 
|| !globMagic.test(s[2])) && + !globMagic.test(s[3]); + const isDrive = /^[a-z]:/i.test(s[0]); + if (isUNC) { + return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))]; + } + else if (isDrive) { + return [s[0], ...s.slice(1).map(ss => this.parse(ss))]; + } + } + return s.map(ss => this.parse(ss)); + }); + this.debug(this.pattern, set); + // filter out everything that didn't compile properly. + this.set = set.filter(s => s.indexOf(false) === -1); + // do not treat the ? in UNC paths as magic + if (this.isWindows) { + for (let i = 0; i < this.set.length; i++) { + const p = this.set[i]; + if (p[0] === '' && + p[1] === '' && + this.globParts[i][2] === '?' && + typeof p[3] === 'string' && + /^[a-z]:$/i.test(p[3])) { + p[2] = '?'; + } + } + } + this.debug(this.pattern, this.set); + } + // various transforms to equivalent pattern sets that are + // faster to process in a filesystem walk. The goal is to + // eliminate what we can, and push all ** patterns as far + // to the right as possible, even if it increases the number + // of patterns that we have to process. + preprocess(globParts) { + // if we're not in globstar mode, then turn all ** into * + if (this.options.noglobstar) { + for (let i = 0; i < globParts.length; i++) { + for (let j = 0; j < globParts[i].length; j++) { + if (globParts[i][j] === '**') { + globParts[i][j] = '*'; + } + } + } + } + const { optimizationLevel = 1 } = this.options; + if (optimizationLevel >= 2) { + // aggressive optimization for the purpose of fs walking + globParts = this.firstPhasePreProcess(globParts); + globParts = this.secondPhasePreProcess(globParts); + } + else if (optimizationLevel >= 1) { + // just basic optimizations to remove some .. parts + globParts = this.levelOneOptimize(globParts); + } + else { + globParts = this.adjascentGlobstarOptimize(globParts); + } + return globParts; + } + // just get rid of adjascent ** portions + adjascentGlobstarOptimize(globParts) { + return globParts.map(parts => { + let gs = -1; + while (-1 !== (gs = parts.indexOf('**', gs + 1))) { + let i = gs; + while (parts[i + 1] === '**') { + i++; + } + if (i !== gs) { + parts.splice(gs, i - gs); + } + } + return parts; + }); + } + // get rid of adjascent ** and resolve .. portions + levelOneOptimize(globParts) { + return globParts.map(parts => { + parts = parts.reduce((set, part) => { + const prev = set[set.length - 1]; + if (part === '**' && prev === '**') { + return set; + } + if (part === '..') { + if (prev && prev !== '..' && prev !== '.' && prev !== '**') { + set.pop(); + return set; + } + } + set.push(part); + return set; + }, []); + return parts.length === 0 ? [''] : parts; + }); + } + levelTwoFileOptimize(parts) { + if (!Array.isArray(parts)) { + parts = this.slashSplit(parts); + } + let didSomething = false; + do { + didSomething = false; + //
<pre>/<e>/<rest> -> <pre>/<rest>
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
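The `levelTwoFileOptimize` method above applies two rewrites until a fixed point: dropping `.` and empty path portions (except the leading empty pair of UNC-style paths), and cancelling `..` against a plain preceding portion, never against `.`, `..`, or `**`, since `**/..` can land anywhere. A standalone sketch of the same two rules, hypothetical and for illustration only (the `optimize` name is mine, not the library's, and the final `parts[0] === '.'` cleanup is omitted):

```js
// Simplified re-implementation of the two levelTwoFileOptimize rewrites,
// assuming preserveMultipleSlashes is off and input is pre-split on '/'.
const optimize = (parts) => {
  let changed;
  do {
    changed = false;
    // <pre>/<e>/<rest> -> <pre>/<rest>: squeeze out '.' and '' portions,
    // never touching the first or last portion, or a UNC-style '' at i === 1
    for (let i = 1; i < parts.length - 1; i++) {
      if (i === 1 && parts[i] === '' && parts[0] === '') continue;
      if (parts[i] === '.' || parts[i] === '') {
        changed = true;
        parts.splice(i, 1);
        i--;
      }
    }
    // <pre>/<p>/../<rest> -> <pre>/<rest>: cancel '..' against a plain
    // preceding portion (not '.', '..', '**', or a missing one)
    let dd = 0;
    while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
      const prev = parts[dd - 1];
      if (prev && prev !== '.' && prev !== '..' && prev !== '**') {
        changed = true;
        parts.splice(dd - 1, 2);
        dd -= 2;
      }
    }
  } while (changed);
  return parts.length === 0 ? [''] : parts;
};

console.log(optimize('a/./b//c'.split('/'))); // [ 'a', 'b', 'c' ]
console.log(optimize('a/b/../c'.split('/'))); // [ 'a', 'c' ]
console.log(optimize('**/../x'.split('/'))); // [ '**', '..', 'x' ] -- left alone
```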
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
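+    // Editor's illustration (not part of the vendored source): for the
+    // hypothetical pattern a/**/../b/c, the **/.. expansion above yields
+    // two patterns, {a/../b/c, a/**/b/c}; the subsequent .. resolution then
+    // reduces the first to b/c, so the walk never restarts at every level.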
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doesn't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (!matched)
+                    continue;
+                globParts[i] = matched;
+                globParts[j] = [];
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
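+    // Editor's illustration (not part of the vendored source): given the
+    // hypothetical expansion {a/*/c, a/b/c}, partsMatch below returns
+    // ['a', '*', 'c'] for the pair (the '*' subsumes 'b', which is not a
+    // dot file), so the duplicate pattern is emptied and filtered out.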
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means the two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
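+    // Editor's illustration (not part of the vendored source): leading '!'
+    // characters toggle negation, so for hypothetical patterns:
+    //   '!a/*'  -> negate === true,  this.pattern === 'a/*'
+    //   '!!a/*' -> negate === false, this.pattern === 'a/*'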
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // don't need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === exports.GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
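+    // Editor's illustration (not part of the vendored source): with
+    // partial matching, running out of file parts is acceptable, e.g.
+    //   matchOne(['', 'a', 'b'], parsed parts of '/*/b/*/d', true)
+    // returns true, because '', 'a', and 'b' all match and only pattern
+    // parts remain.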
+    braceExpand() {
+        return (0, exports.braceExpand)(this.pattern, this.options);
+    }
+    parse(pattern) {
+        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return exports.GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = ast_js_1.AST.fromGlob(pattern, this.options).toMMPattern();
+        return fastTest ? Object.assign(re, { test: fastTest }) : re;
+    }
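+    // Editor's illustration (not part of the vendored source): a pattern
+    // like '*.js' hits the starDotExtRE fast path above, so the returned
+    // matcher's .test is the closure
+    //   f => !f.startsWith('.') && f.endsWith('.js')
+    // rather than a full regexp evaluation.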
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === exports.GLOBSTAR
+                        ? exports.GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== exports.GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== exports.GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = exports.GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== exports.GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
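+    // Editor's illustration (not part of the vendored source): for a
+    // hypothetical pattern 'a/**/b' with default options, the GLOBSTAR in
+    // the middle is folded into the previous part, producing roughly
+    //   ^a(?:\/|\/<twoStar>\/)b$
+    // where <twoStar> is the twoStarNoDot body selected above.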
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
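+    // Editor's illustration (not part of the vendored source): on Windows,
+    //   slashSplit('//host/share/a') -> ['', '', 'host', 'share', 'a']
+    // (UNC root preserved), while 'a//b' -> ['a', 'b'] because repeated
+    // slashes are coalesced unless preserveMultipleSlashes is set.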
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
+    static defaults(def) {
+        return exports.minimatch.defaults(def).Minimatch;
+    }
+}
+exports.Minimatch = Minimatch;
+/* c8 ignore start */
+var ast_js_2 = require("./ast.js");
+Object.defineProperty(exports, "AST", { enumerable: true, get: function () { return ast_js_2.AST; } });
+var escape_js_2 = require("./escape.js");
+Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return escape_js_2.escape; } });
+var unescape_js_2 = require("./unescape.js");
+Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return unescape_js_2.unescape; } });
+/* c8 ignore stop */
+exports.minimatch.AST = ast_js_1.AST;
+exports.minimatch.Minimatch = Minimatch;
+exports.minimatch.escape = escape_js_1.escape;
+exports.minimatch.unescape = unescape_js_1.unescape;
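+// Editor's illustration (not part of the vendored source): typical usage
+// of this CJS build, with hypothetical inputs:
+//   const { minimatch, Minimatch } = require('./index.js');
+//   minimatch('a/b.js', 'a/*.js');              // => true
+//   new Minimatch('!a/*.js').match('a/b.js');   // => false (negated)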
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/unescape.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/unescape.js
new file mode 100644
index 0000000000000..47c36bcee5a02
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/cjs/unescape.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.unescape = void 0;
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * because `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+exports.unescape = unescape;
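+// Editor's illustration (not part of the vendored source), with
+// hypothetical inputs:
+//   unescape('\\*');                                  // => '*'
+//   unescape('[*]');                                  // => '*'
+//   unescape('\\*', { windowsPathsNoEscape: true });  // => '\\*' (unchanged)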
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/assert-valid-pattern.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/assert-valid-pattern.js
new file mode 100644
index 0000000000000..7b534fc30200b
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/assert-valid-pattern.js
@@ -0,0 +1,10 @@
+const MAX_PATTERN_LENGTH = 1024 * 64;
+export const assertValidPattern = (pattern) => {
+    if (typeof pattern !== 'string') {
+        throw new TypeError('invalid pattern');
+    }
+    if (pattern.length > MAX_PATTERN_LENGTH) {
+        throw new TypeError('pattern is too long');
+    }
+};
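+// Editor's illustration (not part of the vendored source):
+//   assertValidPattern('a/*');                      // ok
+//   assertValidPattern(42);                         // throws TypeError('invalid pattern')
+//   assertValidPattern('x'.repeat(1024 * 64 + 1));  // throws: pattern is too long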
+//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/ast.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/ast.js
new file mode 100644
index 0000000000000..9f9835e06a7d5
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/ast.js
@@ -0,0 +1,585 @@
+// parse a single path portion
+import { parseClass } from './brace-expressions.js';
+import { unescape } from './unescape.js';
+const types = new Set(['!', '?', '+', '*', '@']);
+const isExtglobType = (c) => types.has(c);
+// Patterns that get prepended to bind to the start of either the
+// entire string, or just a single path portion, to prevent dots
+// and/or traversal patterns, when needed.
+// Exts don't need the ^ or / bit, because the root binds that already.
+const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
+const startNoDot = '(?!\\.)';
+// characters that indicate a start of pattern needs the "no dots" bit,
+// because a dot *might* be matched. ( is not in the list, because in
+// the case of a child extglob, it will handle the prevention itself.
+const addPatternStart = new Set(['[', '.']);
+// cases where traversal is A-OK, no dot prevention needed
+const justDots = new Set(['..', '.']);
+const reSpecials = new Set('().*{}+?[]^$\\!');
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// any single thing other than /
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// use + when we need to ensure that *something* matches, because the * is
+// the only thing in the path portion.
+const starNoEmpty = qmark + '+?';
+// remove the \ chars that we added if we end up doing a nonmagic compare
+// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
+export class AST {
+    type;
+    #root;
+    #hasMagic;
+    #uflag = false;
+    #parts = [];
+    #parent;
+    #parentIndex;
+    #negs;
+    #filledNegs = false;
+    #options;
+    #toString;
+    // set to true if it's an extglob with no children
+    // (which really means one child of '')
+    #emptyExt = false;
+    constructor(type, parent, options = {}) {
+        this.type = type;
+        // extglobs are inherently magical
+        if (type)
+            this.#hasMagic = true;
+        this.#parent = parent;
+        this.#root = this.#parent ? this.#parent.#root : this;
+        this.#options = this.#root === this ? options : this.#root.#options;
+        this.#negs = this.#root === this ? [] : this.#root.#negs;
+        if (type === '!' && !this.#root.#filledNegs)
+            this.#negs.push(this);
+        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
+    }
+    get hasMagic() {
+        /* c8 ignore start */
+        if (this.#hasMagic !== undefined)
+            return this.#hasMagic;
+        /* c8 ignore stop */
+        for (const p of this.#parts) {
+            if (typeof p === 'string')
+                continue;
+            if (p.type || p.hasMagic)
+                return (this.#hasMagic = true);
+        }
+        // note: will be undefined until we generate the regexp src and find out
+        return this.#hasMagic;
+    }
+    // reconstructs the pattern
+    toString() {
+        if (this.#toString !== undefined)
+            return this.#toString;
+        if (!this.type) {
+            return (this.#toString = this.#parts.map(p => String(p)).join(''));
+        }
+        else {
+            return (this.#toString =
+                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
+        }
+    }
+    #fillNegs() {
+        /* c8 ignore start */
+        if (this !== this.#root)
+            throw new Error('should only call on root');
+        if (this.#filledNegs)
+            return this;
+        /* c8 ignore stop */
+        // call toString() once to fill this out
+        this.toString();
+        this.#filledNegs = true;
+        let n;
+        while ((n = this.#negs.pop())) {
+            if (n.type !== '!')
+                continue;
+            // walk up the tree, appending everything that comes AFTER parentIndex
+            let p = n;
+            let pp = p.#parent;
+            while (pp) {
+                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
+                    for (const part of n.#parts) {
+                        /* c8 ignore start */
+                        if (typeof part === 'string') {
+                            throw new Error('string part in extglob AST??');
+                        }
+                        /* c8 ignore stop */
+                        part.copyIn(pp.#parts[i]);
+                    }
+                }
+                p = pp;
+                pp = p.#parent;
+            }
+        }
+        return this;
+    }
+    push(...parts) {
+        for (const p of parts) {
+            if (p === '')
+                continue;
+            /* c8 ignore start */
+            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
+                throw new Error('invalid part: ' + p);
+            }
+            /* c8 ignore stop */
+            this.#parts.push(p);
+        }
+    }
+    toJSON() {
+        const ret = this.type === null
+            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
+            : [this.type, ...this.#parts.map(p => p.toJSON())];
+        if (this.isStart() && !this.type)
+            ret.unshift([]);
+        if (this.isEnd() &&
+            (this === this.#root ||
+                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
+            ret.push({});
+        }
+        return ret;
+    }
+    isStart() {
+        if (this.#root === this)
+            return true;
+        // if (this.type) return !!this.#parent?.isStart()
+        if (!this.#parent?.isStart())
+            return false;
+        if (this.#parentIndex === 0)
+            return true;
+        // if everything AHEAD of this is a negation, then it's still the "start"
+        const p = this.#parent;
+        for (let i = 0; i < this.#parentIndex; i++) {
+            const pp = p.#parts[i];
+            if (!(pp instanceof AST && pp.type === '!')) {
+                return false;
+            }
+        }
+        return true;
+    }
+    isEnd() {
+        if (this.#root === this)
+            return true;
+        if (this.#parent?.type === '!')
+            return true;
+        if (!this.#parent?.isEnd())
+            return false;
+        if (!this.type)
+            return this.#parent?.isEnd();
+        // if not root, it'll always have a parent
+        /* c8 ignore start */
+        const pl = this.#parent ? this.#parent.#parts.length : 0;
+        /* c8 ignore stop */
+        return this.#parentIndex === pl - 1;
+    }
+    copyIn(part) {
+        if (typeof part === 'string')
+            this.push(part);
+        else
+            this.push(part.clone(this));
+    }
+    clone(parent) {
+        const c = new AST(this.type, parent);
+        for (const p of this.#parts) {
+            c.copyIn(p);
+        }
+        return c;
+    }
+    static #parseAST(str, ast, pos, opt) {
+        let escaping = false;
+        let inBrace = false;
+        let braceStart = -1;
+        let braceNeg = false;
+        if (ast.type === null) {
+            // outside of an extglob, append until we find a start
+            let i = pos;
+            let acc = '';
+            while (i < str.length) {
+                const c = str.charAt(i++);
+                // still accumulate escapes at this point, but we do ignore
+                // starts that are escaped
+                if (escaping || c === '\\') {
+                    escaping = !escaping;
+                    acc += c;
+                    continue;
+                }
+                if (inBrace) {
+                    if (i === braceStart + 1) {
+                        if (c === '^' || c === '!') {
+                            braceNeg = true;
+                        }
+                    }
+                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                        inBrace = false;
+                    }
+                    acc += c;
+                    continue;
+                }
+                else if (c === '[') {
+                    inBrace = true;
+                    braceStart = i;
+                    braceNeg = false;
+                    acc += c;
+                    continue;
+                }
+                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
+                    ast.push(acc);
+                    acc = '';
+                    const ext = new AST(c, ast);
+                    i = AST.#parseAST(str, ext, i, opt);
+                    ast.push(ext);
+                    continue;
+                }
+                acc += c;
+            }
+            ast.push(acc);
+            return i;
+        }
+        // some kind of extglob, pos is at the (
+        // find the next | or )
+        let i = pos + 1;
+        let part = new AST(null, ast);
+        const parts = [];
+        let acc = '';
+        while (i < str.length) {
+            const c = str.charAt(i++);
+            // still accumulate escapes at this point, but we do ignore
+            // starts that are escaped
+            if (escaping || c === '\\') {
+                escaping = !escaping;
+                acc += c;
+                continue;
+            }
+            if (inBrace) {
+                if (i === braceStart + 1) {
+                    if (c === '^' || c === '!') {
+                        braceNeg = true;
+                    }
+                }
+                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                    inBrace = false;
+                }
+                acc += c;
+                continue;
+            }
+            else if (c === '[') {
+                inBrace = true;
+                braceStart = i;
+                braceNeg = false;
+                acc += c;
+                continue;
+            }
+            if (isExtglobType(c) && str.charAt(i) === '(') {
+                part.push(acc);
+                acc = '';
+                const ext = new AST(c, part);
+                part.push(ext);
+                i = AST.#parseAST(str, ext, i, opt);
+                continue;
+            }
+            if (c === '|') {
+                part.push(acc);
+                acc = '';
+                parts.push(part);
+                part = new AST(null, ast);
+                continue;
+            }
+            if (c === ')') {
+                if (acc === '' && ast.#parts.length === 0) {
+                    ast.#emptyExt = true;
+                }
+                part.push(acc);
+                acc = '';
+                ast.push(...parts, part);
+                return i;
+            }
+            acc += c;
+        }
+        // unfinished extglob
+        // if we got here, it was a malformed extglob! not an extglob, but
+        // maybe something else in there.
+        ast.type = null;
+        ast.#hasMagic = undefined;
+        ast.#parts = [str.substring(pos - 1)];
+        return i;
+    }
+    static fromGlob(pattern, options = {}) {
+        const ast = new AST(null, undefined, options);
+        AST.#parseAST(pattern, ast, 0, options);
+        return ast;
+    }
+    // returns the regular expression if there's magic, or the unescaped
+    // string if not.
+    toMMPattern() {
+        // should only be called on root
+        /* c8 ignore start */
+        if (this !== this.#root)
+            return this.#root.toMMPattern();
+        /* c8 ignore stop */
+        const glob = this.toString();
+        const [re, body, hasMagic, uflag] = this.toRegExpSource();
+        // if we're in nocase mode, and not nocaseMagicOnly, then we do
+        // still need a regular expression if we have to case-insensitively
+        // match capital/lowercase characters.
+        const anyMagic = hasMagic ||
+            this.#hasMagic ||
+            (this.#options.nocase &&
+                !this.#options.nocaseMagicOnly &&
+                glob.toUpperCase() !== glob.toLowerCase());
+        if (!anyMagic) {
+            return body;
+        }
+        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
+        return Object.assign(new RegExp(`^${re}$`, flags), {
+            _src: re,
+            _glob: glob,
+        });
+    }
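+    // Editor's illustration (not part of the vendored source): for a
+    // non-magic pattern such as 'foo', toMMPattern returns the plain
+    // unescaped string 'foo'; for a magic pattern such as '*.js',
+    //   AST.fromGlob('*.js').toMMPattern() instanceof RegExp // => true
+    // and the RegExp carries the _src and _glob properties set above.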
+    // returns the string match, the regexp source, whether there's magic
+    // in the regexp (so a regular expression is required) and whether or
+    // not the uflag is needed for the regular expression (for posix classes)
+    // TODO: instead of injecting the start/end at this point, just return
+    // the BODY of the regexp, along with the start/end portions suitable
+    // for binding the start/end in either a joined full-path makeRe context
+    // (where we bind to (^|/), or a standalone matchPart context (where
+    // we bind to ^, and not /).  Otherwise slashes get duped!
+    //
+    // In part-matching mode, the start is:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: ^(?!\.\.?$)
+    // - if dots allowed or not possible: ^
+    // - if dots possible and not allowed: ^(?!\.)
+    // end is:
+    // - if not isEnd(): nothing
+    // - else: $
+    //
+    // In full-path matching mode, we put the slash at the START of the
+    // pattern, so start is:
+    // - if first pattern: same as part-matching mode
+    // - if not isStart(): nothing
+    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
+    // - if dots allowed or not possible: /
+    // - if dots possible and not allowed: /(?!\.)
+    // end is:
+    // - if last pattern, same as part-matching mode
+    // - else nothing
+    //
+    // Always put the (?:$|/) on negated tails, though, because that has to be
+    // there to bind the end of the negated pattern portion, and it's easier to
+    // just stick it in now rather than try to inject it later in the middle of
+    // the pattern.
+    //
+    // We can just always return the same end, and leave it up to the caller
+    // to know whether it's going to be used joined or in parts.
+    // And, if the start is adjusted slightly, can do the same there:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
+    // - if dots allowed or not possible: (?:/|^)
+    // - if dots possible and not allowed: (?:/|^)(?!\.)
+    //
+    // But it's better to have a simpler binding without a conditional, for
+    // performance, so probably better to return both start options.
+    //
+    // Then the caller just ignores the end if it's not the first pattern,
+    // and the start always gets applied.
+    //
+    // But that's always going to be $ if it's the ending pattern, or nothing,
+    // so the caller can just attach $ at the end of the pattern when building.
+    //
+    // So the todo is:
+    // - better detect what kind of start is needed
+    // - return both flavors of starting pattern
+    // - attach $ at the end of the pattern when creating the actual RegExp
+    //
+    // Ah, but wait, no, that all only applies to the root when the first pattern
+    // is not an extglob. If the first pattern IS an extglob, then we need all
+    // that dot prevention biz to live in the extglob portions, because eg
+    // +(*|.x*) can match .xy but not .yx.
+    //
+    // So, return the two flavors if it's #root and the first child is not an
+    // AST, otherwise leave it to the child AST to handle it, and there,
+    // use the (?:^|/) style of start binding.
+    //
+    // Even simplified further:
+    // - Since the start for a join is eg /(?!\.) and the start for a part
+    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
+    // or start or whatever) and prepend ^ or / at the Regexp construction.
+    toRegExpSource(allowDot) {
+        const dot = allowDot ?? !!this.#options.dot;
+        if (this.#root === this)
+            this.#fillNegs();
+        if (!this.type) {
+            const noEmpty = this.isStart() && this.isEnd();
+            const src = this.#parts
+                .map(p => {
+                const [re, _, hasMagic, uflag] = typeof p === 'string'
+                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
+                    : p.toRegExpSource(allowDot);
+                this.#hasMagic = this.#hasMagic || hasMagic;
+                this.#uflag = this.#uflag || uflag;
+                return re;
+            })
+                .join('');
+            let start = '';
+            if (this.isStart()) {
+                if (typeof this.#parts[0] === 'string') {
+                    // this is the string that will match the start of the pattern,
+                    // so we need to protect against dots and such.
+                    // '.' and '..' cannot match unless the pattern is that exactly,
+                    // even if it starts with . or dot:true is set.
+                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
+                    if (!dotTravAllowed) {
+                        const aps = addPatternStart;
+                        // check if we have a possibility of matching . or ..,
+                        // and prevent that.
+                        const needNoTrav = 
+                        // dots are allowed, and the pattern starts with [ or .
+                        (dot && aps.has(src.charAt(0))) ||
+                            // the pattern starts with \., and then [ or .
+                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
+                            // the pattern starts with \.\., and then [ or .
+                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
+                        // no need to prevent dots if it can't match a dot, or if a
+                        // sub-pattern will be preventing it anyway.
+                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
+                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
+                    }
+                }
+            }
+            // append the "end of path portion" pattern to negation tails
+            let end = '';
+            if (this.isEnd() &&
+                this.#root.#filledNegs &&
+                this.#parent?.type === '!') {
+                end = '(?:$|\\/)';
+            }
+            const final = start + src + end;
+            return [
+                final,
+                unescape(src),
+                (this.#hasMagic = !!this.#hasMagic),
+                this.#uflag,
+            ];
+        }
+        // We need to calculate the body *twice* if it's a repeat pattern
+        // at the start, once in nodot mode, then again in dot mode, so a
+        // pattern like *(?) can match 'x.y'
+        const repeated = this.type === '*' || this.type === '+';
+        // some kind of extglob
+        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
+        let body = this.#partsToRegExp(dot);
+        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
+            // invalid extglob, has to at least be *something* present, if it's
+            // the entire path portion.
+            const s = this.toString();
+            this.#parts = [s];
+            this.type = null;
+            this.#hasMagic = undefined;
+            return [s, unescape(this.toString()), false, false];
+        }
+        // XXX abstract out this map method
+        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
+            ? ''
+            : this.#partsToRegExp(true);
+        if (bodyDotAllowed === body) {
+            bodyDotAllowed = '';
+        }
+        if (bodyDotAllowed) {
+            body = `(?:${body})(?:${bodyDotAllowed})*?`;
+        }
+        // an empty !() is exactly equivalent to a starNoEmpty
+        let final = '';
+        if (this.type === '!' && this.#emptyExt) {
+            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
+        }
+        else {
+            const close = this.type === '!'
+                ? // !() must match something, but !(x) can match ''
+                    '))' +
+                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
+                        star +
+                        ')'
+                : this.type === '@'
+                    ? ')'
+                    : this.type === '?'
+                        ? ')?'
+                        : this.type === '+' && bodyDotAllowed
+                            ? ')'
+                            : this.type === '*' && bodyDotAllowed
+                                ? `)?`
+                                : `)${this.type}`;
+            final = start + body + close;
+        }
+        return [
+            final,
+            unescape(body),
+            (this.#hasMagic = !!this.#hasMagic),
+            this.#uflag,
+        ];
+    }
+    #partsToRegExp(dot) {
+        return this.#parts
+            .map(p => {
+            // extglob ASTs should only contain parent ASTs
+            /* c8 ignore start */
+            if (typeof p === 'string') {
+                throw new Error('string type in extglob ast??');
+            }
+            /* c8 ignore stop */
+            // can ignore hasMagic, because extglobs are already always magic
+            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
+            this.#uflag = this.#uflag || uflag;
+            return re;
+        })
+            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
+            .join('|');
+    }
+    static #parseGlob(glob, hasMagic, noEmpty = false) {
+        let escaping = false;
+        let re = '';
+        let uflag = false;
+        for (let i = 0; i < glob.length; i++) {
+            const c = glob.charAt(i);
+            if (escaping) {
+                escaping = false;
+                re += (reSpecials.has(c) ? '\\' : '') + c;
+                continue;
+            }
+            if (c === '\\') {
+                if (i === glob.length - 1) {
+                    re += '\\\\';
+                }
+                else {
+                    escaping = true;
+                }
+                continue;
+            }
+            if (c === '[') {
+                const [src, needUflag, consumed, magic] = parseClass(glob, i);
+                if (consumed) {
+                    re += src;
+                    uflag = uflag || needUflag;
+                    i += consumed - 1;
+                    hasMagic = hasMagic || magic;
+                    continue;
+                }
+            }
+            if (c === '*') {
+                if (noEmpty && glob === '*')
+                    re += starNoEmpty;
+                else
+                    re += star;
+                hasMagic = true;
+                continue;
+            }
+            if (c === '?') {
+                re += qmark;
+                hasMagic = true;
+                continue;
+            }
+            re += regExpEscape(c);
+        }
+        return [re, unescape(glob), !!hasMagic, uflag];
+    }
+}
+//# sourceMappingURL=ast.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/brace-expressions.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/brace-expressions.js
new file mode 100644
index 0000000000000..c629d6ae816e2
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/brace-expressions.js
@@ -0,0 +1,148 @@
+// translate the various posix character classes into unicode properties
+// this works across all unicode locales
+// { <posix class>: [<translation>, /u flag required, negated]
+const posixClasses = {
+    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
+    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
+    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
+    '[:blank:]': ['\\p{Zs}\\t', true],
+    '[:cntrl:]': ['\\p{Cc}', true],
+    '[:digit:]': ['\\p{Nd}', true],
+    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
+    '[:lower:]': ['\\p{Ll}', true],
+    '[:print:]': ['\\p{C}', true],
+    '[:punct:]': ['\\p{P}', true],
+    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
+    '[:upper:]': ['\\p{Lu}', true],
+    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
+    '[:xdigit:]': ['A-Fa-f0-9', false],
+};
+// only need to escape a few things inside of brace expressions
+// escapes: [ \ ] -
+const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
+// escape all regexp magic characters
+const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// everything has already been escaped, we just have to join
+const rangesToString = (ranges) => ranges.join('');
+// takes a glob string at a posix brace expression, and returns
+// an equivalent regular expression source, and boolean indicating
+// whether the /u flag needs to be applied, and the number of chars
+// consumed to parse the character class.
+// This also removes out of order ranges, and returns ($.) if the
+// entire class is just no good.
+export const parseClass = (glob, position) => {
+    const pos = position;
+    /* c8 ignore start */
+    if (glob.charAt(pos) !== '[') {
+        throw new Error('not in a brace expression');
+    }
+    /* c8 ignore stop */
+    const ranges = [];
+    const negs = [];
+    let i = pos + 1;
+    let sawStart = false;
+    let uflag = false;
+    let escaping = false;
+    let negate = false;
+    let endPos = pos;
+    let rangeStart = '';
+    WHILE: while (i < glob.length) {
+        const c = glob.charAt(i);
+        if ((c === '!' || c === '^') && i === pos + 1) {
+            negate = true;
+            i++;
+            continue;
+        }
+        if (c === ']' && sawStart && !escaping) {
+            endPos = i + 1;
+            break;
+        }
+        sawStart = true;
+        if (c === '\\') {
+            if (!escaping) {
+                escaping = true;
+                i++;
+                continue;
+            }
+            // escaped \ char, fall through and treat like normal char
+        }
+        if (c === '[' && !escaping) {
+            // either a posix class, a collation equivalent, or just a [
+            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
+                if (glob.startsWith(cls, i)) {
+                    // invalid, [a-[] is fine, but not [a-[:alpha]]
+                    if (rangeStart) {
+                        return ['$.', false, glob.length - pos, true];
+                    }
+                    i += cls.length;
+                    if (neg)
+                        negs.push(unip);
+                    else
+                        ranges.push(unip);
+                    uflag = uflag || u;
+                    continue WHILE;
+                }
+            }
+        }
+        // now it's just a normal character, effectively
+        escaping = false;
+        if (rangeStart) {
+            // throw this range away if it's not valid, but others
+            // can still match.
+            if (c > rangeStart) {
+                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
+            }
+            else if (c === rangeStart) {
+                ranges.push(braceEscape(c));
+            }
+            rangeStart = '';
+            i++;
+            continue;
+        }
+        // now might be the start of a range.
+        // can be either c-d (a range), c-] (literal c and -), or just c
+        if (glob.startsWith('-]', i + 1)) {
+            ranges.push(braceEscape(c + '-'));
+            i += 2;
+            continue;
+        }
+        if (glob.startsWith('-', i + 1)) {
+            rangeStart = c;
+            i += 2;
+            continue;
+        }
+        // not the start of a range, just a single character
+        ranges.push(braceEscape(c));
+        i++;
+    }
+    if (endPos < i) {
+        // didn't see the end of the class, not a valid class,
+        // but might still be valid as a literal match.
+        return ['', false, 0, false];
+    }
+    // if we got no ranges and no negates, then we have a range that
+    // cannot possibly match anything, and that poisons the whole glob
+    if (!ranges.length && !negs.length) {
+        return ['$.', false, glob.length - pos, true];
+    }
+    // if we got one positive range, and it's a single character, then that's
+    // not actually a magic pattern, it's just that one literal character.
+    // we should not treat that as "magic", we should just return the literal
+    // character. [_] is a perfectly valid way to escape glob magic chars.
+    if (negs.length === 0 &&
+        ranges.length === 1 &&
+        /^\\?.$/.test(ranges[0]) &&
+        !negate) {
+        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
+        return [regexpEscape(r), false, endPos - pos, false];
+    }
+    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
+    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
+    const comb = ranges.length && negs.length
+        ? '(' + sranges + '|' + snegs + ')'
+        : ranges.length
+            ? sranges
+            : snegs;
+    return [comb, uflag, endPos - pos, true];
+};
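+// Editor's illustration (not part of the vendored source), with
+// hypothetical inputs:
+//   parseClass('[a-c]', 0)        // => ['[a-c]', false, 5, true]
+//   parseClass('[[:digit:]]', 0)  // => ['[\\p{Nd}]', true, 11, true]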
+//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/escape.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/escape.js
new file mode 100644
index 0000000000000..16f7c8c7bdc64
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/escape.js
@@ -0,0 +1,18 @@
+/**
+ * Escape all magic characters in a glob pattern.
+ *
+ * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
+ * option is used, then characters are escaped by wrapping in `[]`, because
+ * a magic character wrapped in a character class can only be satisfied by
+ * that exact character.  In this mode, `\` is _not_ escaped, because it is
+ * not interpreted as a magic character, but instead as a path separator.
+ */
+export const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    // don't need to escape +@! because we escape the parens
+    // that make those magic, and escaping ! as [!] isn't valid,
+    // because [!]] is a valid glob class meaning not ']'.
+    return windowsPathsNoEscape
+        ? s.replace(/[?*()[\]]/g, '[$&]')
+        : s.replace(/[?*()[\]\\]/g, '\\$&');
+};
+//# sourceMappingURL=escape.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/index.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/index.js
new file mode 100644
index 0000000000000..831b6a67f63fb
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/index.js
@@ -0,0 +1,995 @@
+import expand from 'brace-expansion';
+import { assertValidPattern } from './assert-valid-pattern.js';
+import { AST } from './ast.js';
+import { escape } from './escape.js';
+import { unescape } from './unescape.js';
+export const minimatch = (p, pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // shortcut: comments match nothing.
+    if (!options.nocomment && pattern.charAt(0) === '#') {
+        return false;
+    }
+    return new Minimatch(pattern, options).match(p);
+};
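+// A usage sketch (illustrative only, not part of the vendored module):
+//
+//   minimatch('bar.foo', '*.foo')               // => true
+//   minimatch('bar.foo', '*.bar')               // => false
+//   minimatch('#a', '#a')                       // => false ('#' starts a comment)
+//   minimatch('#a', '#a', { nocomment: true })  // => true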
+// Optimized checking for the most common glob patterns.
+const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
+const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
+const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
+const starDotExtTestNocase = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
+};
+const starDotExtTestNocaseDot = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => f.toLowerCase().endsWith(ext);
+};
+const starDotStarRE = /^\*+\.\*+$/;
+const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
+const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
+const dotStarRE = /^\.\*+$/;
+const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
+const starRE = /^\*+$/;
+const starTest = (f) => f.length !== 0 && !f.startsWith('.');
+const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
+const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
+const qmarksTestNocase = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestNocaseDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTest = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTestNoExt = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && !f.startsWith('.');
+};
+const qmarksTestNoExtDot = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && f !== '.' && f !== '..';
+};
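+// Sketch of the fast path above: for the pattern '???.js', qmarksRE captures
+// $0 = '???.js' and ext = '.js', so qmarksTest yields
+//   f => f.length === 6 && !f.startsWith('.') && f.endsWith('.js')
+// which accepts 'abc.js' and rejects 'abcd.js' without building a RegExp.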
+/* c8 ignore start */
+const defaultPlatform = (typeof process === 'object' && process
+    ? (typeof process.env === 'object' &&
+        process.env &&
+        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
+        process.platform
+    : 'posix');
+const path = {
+    win32: { sep: '\\' },
+    posix: { sep: '/' },
+};
+/* c8 ignore stop */
+export const sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
+minimatch.sep = sep;
+export const GLOBSTAR = Symbol('globstar **');
+minimatch.GLOBSTAR = GLOBSTAR;
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// ** when dots are allowed.  Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
+export const filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
+minimatch.filter = filter;
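+// e.g. (sketch): ['a.js', 'b.ts', 'c.js'].filter(filter('*.js'))
+// returns ['a.js', 'c.js'].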
+const ext = (a, b = {}) => Object.assign({}, a, b);
+export const defaults = (def) => {
+    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+        return minimatch;
+    }
+    const orig = minimatch;
+    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
+    return Object.assign(m, {
+        Minimatch: class Minimatch extends orig.Minimatch {
+            constructor(pattern, options = {}) {
+                super(pattern, ext(def, options));
+            }
+            static defaults(options) {
+                return orig.defaults(ext(def, options)).Minimatch;
+            }
+        },
+        AST: class AST extends orig.AST {
+            /* c8 ignore start */
+            constructor(type, parent, options = {}) {
+                super(type, parent, ext(def, options));
+            }
+            /* c8 ignore stop */
+            static fromGlob(pattern, options = {}) {
+                return orig.AST.fromGlob(pattern, ext(def, options));
+            }
+        },
+        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
+        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
+        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
+        defaults: (options) => orig.defaults(ext(def, options)),
+        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
+        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
+        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
+        sep: orig.sep,
+        GLOBSTAR: GLOBSTAR,
+    });
+};
+minimatch.defaults = defaults;
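+// A defaults() usage sketch (illustrative only):
+//
+//   const mi = minimatch.defaults({ nocase: true })
+//   mi('FOO.js', '*.js')   // => true, the nocase default applies
+//   mi.makeRe('*.js')      // the returned helpers inherit the defaults too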
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+export const braceExpand = (pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // Thanks to Yeting Li for
+    // improving this regexp to avoid a ReDOS vulnerability.
+    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+        // shortcut. no need to expand.
+        return [pattern];
+    }
+    return expand(pattern);
+};
+minimatch.braceExpand = braceExpand;
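+// e.g.: braceExpand('a{b,c}d') => ['abd', 'acd'], while an invalid set is
+// returned as-is: braceExpand('a{b}c') => ['a{b}c'].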
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion.  Otherwise, any series
+// of * is equivalent to a single *.  Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+export const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
+minimatch.makeRe = makeRe;
+export const match = (list, pattern, options = {}) => {
+    const mm = new Minimatch(pattern, options);
+    list = list.filter(f => mm.match(f));
+    if (mm.options.nonull && !list.length) {
+        list.push(pattern);
+    }
+    return list;
+};
+minimatch.match = match;
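+// e.g.: match(['a.js', 'b.css'], '*.js') => ['a.js'], and with nonull the
+// pattern itself comes back on no match:
+//   match([], '*.md', { nonull: true }) => ['*.md']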
+// replace stuff like \* with *
+const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+export class Minimatch {
+    options;
+    set;
+    pattern;
+    windowsPathsNoEscape;
+    nonegate;
+    negate;
+    comment;
+    empty;
+    preserveMultipleSlashes;
+    partial;
+    globSet;
+    globParts;
+    nocase;
+    isWindows;
+    platform;
+    windowsNoMagicRoot;
+    regexp;
+    constructor(pattern, options = {}) {
+        assertValidPattern(pattern);
+        options = options || {};
+        this.options = options;
+        this.pattern = pattern;
+        this.platform = options.platform || defaultPlatform;
+        this.isWindows = this.platform === 'win32';
+        this.windowsPathsNoEscape =
+            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
+        if (this.windowsPathsNoEscape) {
+            this.pattern = this.pattern.replace(/\\/g, '/');
+        }
+        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
+        this.regexp = null;
+        this.negate = false;
+        this.nonegate = !!options.nonegate;
+        this.comment = false;
+        this.empty = false;
+        this.partial = !!options.partial;
+        this.nocase = !!this.options.nocase;
+        this.windowsNoMagicRoot =
+            options.windowsNoMagicRoot !== undefined
+                ? options.windowsNoMagicRoot
+                : !!(this.isWindows && this.nocase);
+        this.globSet = [];
+        this.globParts = [];
+        this.set = [];
+        // make the set of regexps etc.
+        this.make();
+    }
+    hasMagic() {
+        if (this.options.magicalBraces && this.set.length > 1) {
+            return true;
+        }
+        for (const pattern of this.set) {
+            for (const part of pattern) {
+                if (typeof part !== 'string')
+                    return true;
+            }
+        }
+        return false;
+    }
+    debug(..._) { }
+    make() {
+        const pattern = this.pattern;
+        const options = this.options;
+        // empty patterns and comments match nothing.
+        if (!options.nocomment && pattern.charAt(0) === '#') {
+            this.comment = true;
+            return;
+        }
+        if (!pattern) {
+            this.empty = true;
+            return;
+        }
+        // step 1: figure out negation, etc.
+        this.parseNegate();
+        // step 2: expand braces
+        this.globSet = [...new Set(this.braceExpand())];
+        if (options.debug) {
+            this.debug = (...args) => console.error(...args);
+        }
+        this.debug(this.pattern, this.globSet);
+        // step 3: now we have a set, so turn each one into a series of
+        // path-portion matching patterns.
+        // These will be regexps, except in the case of "**", which is
+        // set to the GLOBSTAR object for globstar behavior,
+        // and will not contain any / characters
+        //
+        // First, we preprocess to make the glob pattern sets a bit simpler
+        // and deduped.  There are some perf-killing patterns that can cause
+        // problems with a glob walk, but we can simplify them down a bit.
+        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
+        this.globParts = this.preprocess(rawGlobParts);
+        this.debug(this.pattern, this.globParts);
+        // glob --> regexps
+        let set = this.globParts.map((s, _, __) => {
+            if (this.isWindows && this.windowsNoMagicRoot) {
+                // check if it's a drive or unc path.
+                const isUNC = s[0] === '' &&
+                    s[1] === '' &&
+                    (s[2] === '?' || !globMagic.test(s[2])) &&
+                    !globMagic.test(s[3]);
+                const isDrive = /^[a-z]:/i.test(s[0]);
+                if (isUNC) {
+                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
+                }
+                else if (isDrive) {
+                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
+                }
+            }
+            return s.map(ss => this.parse(ss));
+        });
+        this.debug(this.pattern, set);
+        // filter out everything that didn't compile properly.
+        this.set = set.filter(s => s.indexOf(false) === -1);
+        // do not treat the ? in UNC paths as magic
+        if (this.isWindows) {
+            for (let i = 0; i < this.set.length; i++) {
+                const p = this.set[i];
+                if (p[0] === '' &&
+                    p[1] === '' &&
+                    this.globParts[i][2] === '?' &&
+                    typeof p[3] === 'string' &&
+                    /^[a-z]:$/i.test(p[3])) {
+                    p[2] = '?';
+                }
+            }
+        }
+        this.debug(this.pattern, this.set);
+    }
+    // various transforms to equivalent pattern sets that are
+    // faster to process in a filesystem walk.  The goal is to
+    // eliminate what we can, and push all ** patterns as far
+    // to the right as possible, even if it increases the number
+    // of patterns that we have to process.
+    preprocess(globParts) {
+        // if we're not in globstar mode, then turn all ** into *
+        if (this.options.noglobstar) {
+            for (let i = 0; i < globParts.length; i++) {
+                for (let j = 0; j < globParts[i].length; j++) {
+                    if (globParts[i][j] === '**') {
+                        globParts[i][j] = '*';
+                    }
+                }
+            }
+        }
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            // aggressive optimization for the purpose of fs walking
+            globParts = this.firstPhasePreProcess(globParts);
+            globParts = this.secondPhasePreProcess(globParts);
+        }
+        else if (optimizationLevel >= 1) {
+            // just basic optimizations to remove some .. parts
+            globParts = this.levelOneOptimize(globParts);
+        }
+        else {
+            globParts = this.adjascentGlobstarOptimize(globParts);
+        }
+        return globParts;
+    }
+    // just get rid of adjacent ** portions
+    adjascentGlobstarOptimize(globParts) {
+        return globParts.map(parts => {
+            let gs = -1;
+            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                let i = gs;
+                while (parts[i + 1] === '**') {
+                    i++;
+                }
+                if (i !== gs) {
+                    parts.splice(gs, i - gs);
+                }
+            }
+            return parts;
+        });
+    }
+    // get rid of adjacent ** and resolve .. portions
+    levelOneOptimize(globParts) {
+        return globParts.map(parts => {
+            parts = parts.reduce((set, part) => {
+                const prev = set[set.length - 1];
+                if (part === '**' && prev === '**') {
+                    return set;
+                }
+                if (part === '..') {
+                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
+                        set.pop();
+                        return set;
+                    }
+                }
+                set.push(part);
+                return set;
+            }, []);
+            return parts.length === 0 ? [''] : parts;
+        });
+    }
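+    // e.g. (sketch): ['a', 'b', '..', 'c', '**', '**', 'd'] reduces to
+    // ['a', 'c', '**', 'd']: the '..' consumes 'b', adjacent '**' collapse.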
+    levelTwoFileOptimize(parts) {
+        if (!Array.isArray(parts)) {
+            parts = this.slashSplit(parts);
+        }
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/<e>/<rest> -> <pre>/<rest>
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
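+    // e.g. (sketch): 'a//b/./c/../d' splits to ['a', '', 'b', '.', 'c', '..', 'd']
+    // and reduces to ['a', 'b', 'd'].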
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
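+    // e.g. (sketch): [['a', '**', '..', 'b', 'c']] comes out as the pair
+    // [['b', 'c'], ['a', '**', 'b', 'c']], covering both readings of '**/..'.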
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doesn't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (!matched)
+                    continue;
+                globParts[i] = matched;
+                globParts[j] = [];
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means the two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
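+    // e.g. (sketch): partsMatch(['a', '*', 'c'], ['a', 'b', 'c']) returns
+    // ['a', '*', 'c'], but partsMatch(['a', '*'], ['*', 'b']) returns false,
+    // since each side's '*' would have to absorb the other's.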
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
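+    // e.g.: a pattern of '!!!a*' leaves this.pattern === 'a*' with
+    // this.negate === true (an odd count of leading '!' negates).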
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // don't need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
+    braceExpand() {
+        return braceExpand(this.pattern, this.options);
+    }
+    parse(pattern) {
+        assertValidPattern(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = AST.fromGlob(pattern, this.options).toMMPattern();
+        return fastTest ? Object.assign(re, { test: fastTest }) : re;
+    }
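+    // e.g. (sketch): parse('*.js') still compiles the AST, but swaps in the
+    // starDotExtTest fast path as the .test function for the '.js' suffix.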
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === GLOBSTAR
+                        ? GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== GLOBSTAR || prev === GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
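+    // e.g.: on windows, slashSplit('//host/share/x') returns
+    // ['', '', 'host', 'share', 'x'], preserving the UNC leading slashes.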
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
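+    // A match() usage sketch (illustrative only):
+    //   const mm = new Minimatch('!*.js')
+    //   mm.match('a.js')   // => false (pattern is negated)
+    //   mm.match('a.ts')   // => true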
+    static defaults(def) {
+        return minimatch.defaults(def).Minimatch;
+    }
+}
+/* c8 ignore start */
+export { AST } from './ast.js';
+export { escape } from './escape.js';
+export { unescape } from './unescape.js';
+/* c8 ignore stop */
+minimatch.AST = AST;
+minimatch.Minimatch = Minimatch;
+minimatch.escape = escape;
+minimatch.unescape = unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/unescape.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/unescape.js
new file mode 100644
index 0000000000000..0faf9a2b7306f
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/dist/mjs/unescape.js
@@ -0,0 +1,20 @@
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * because `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+export const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json
new file mode 100644
index 0000000000000..061c3b9f34330
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minimatch/package.json
@@ -0,0 +1,86 @@
+{
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me)",
+  "name": "minimatch",
+  "description": "a glob matcher in javascript",
+  "version": "9.0.3",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/minimatch.git"
+  },
+  "main": "./dist/cjs/index.js",
+  "module": "./dist/mjs/index.js",
+  "types": "./dist/cjs/index.d.ts",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "preprepare": "rm -rf dist",
+    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
+    "postprepare": "bash fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "c8 tap",
+    "snap": "c8 tap",
+    "format": "prettier --write . --loglevel warn",
+    "benchmark": "node benchmark/index.js",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 80,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "engines": {
+    "node": ">=16 || 14 >=14.17"
+  },
+  "dependencies": {
+    "brace-expansion": "^2.0.1"
+  },
+  "devDependencies": {
+    "@types/brace-expansion": "^1.1.0",
+    "@types/node": "^18.15.11",
+    "@types/tap": "^15.0.8",
+    "c8": "^7.12.0",
+    "eslint-config-prettier": "^8.6.0",
+    "mkdirp": "1",
+    "prettier": "^2.8.2",
+    "tap": "^16.3.7",
+    "ts-node": "^10.9.1",
+    "typedoc": "^0.23.21",
+    "typescript": "^4.9.3"
+  },
+  "tap": {
+    "coverage": false,
+    "node-arg": [
+      "--no-warnings",
+      "--loader",
+      "ts-node/esm"
+    ],
+    "ts": false
+  },
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "license": "ISC"
+}
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/LICENSE b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/LICENSE
new file mode 100644
index 0000000000000..97f8e32ed82e4
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/index.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
new file mode 100644
index 0000000000000..b6cdae8eb514b
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
@@ -0,0 +1,1028 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0;
+const proc = typeof process === 'object' && process
+    ? process
+    : {
+        stdout: null,
+        stderr: null,
+    };
+const events_1 = require("events");
+const stream_1 = __importDefault(require("stream"));
+const string_decoder_1 = require("string_decoder");
+/**
+ * Return true if the argument is a Minipass stream, Node stream, or something
+ * else that Minipass can interact with.
+ */
+const isStream = (s) => !!s &&
+    typeof s === 'object' &&
+    (s instanceof Minipass ||
+        s instanceof stream_1.default ||
+        (0, exports.isReadable)(s) ||
+        (0, exports.isWritable)(s));
+exports.isStream = isStream;
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+const isReadable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof events_1.EventEmitter &&
+    typeof s.pipe === 'function' &&
+    // node core Writable streams have a pipe() method, but it throws
+    s.pipe !== stream_1.default.Writable.prototype.pipe;
+exports.isReadable = isReadable;
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+const isWritable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof events_1.EventEmitter &&
+    typeof s.write === 'function' &&
+    typeof s.end === 'function';
+exports.isWritable = isWritable;
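+// e.g.: isStream(process.stdout) => true and isWritable(process.stdout) =>
+// true (it is an EventEmitter with write() and end()), while
+// isStream('nope') => false.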
+const EOF = Symbol('EOF');
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
+const EMITTED_END = Symbol('emittedEnd');
+const EMITTING_END = Symbol('emittingEnd');
+const EMITTED_ERROR = Symbol('emittedError');
+const CLOSED = Symbol('closed');
+const READ = Symbol('read');
+const FLUSH = Symbol('flush');
+const FLUSHCHUNK = Symbol('flushChunk');
+const ENCODING = Symbol('encoding');
+const DECODER = Symbol('decoder');
+const FLOWING = Symbol('flowing');
+const PAUSED = Symbol('paused');
+const RESUME = Symbol('resume');
+const BUFFER = Symbol('buffer');
+const PIPES = Symbol('pipes');
+const BUFFERLENGTH = Symbol('bufferLength');
+const BUFFERPUSH = Symbol('bufferPush');
+const BUFFERSHIFT = Symbol('bufferShift');
+const OBJECTMODE = Symbol('objectMode');
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed');
+// internal event when stream has an error
+const ERROR = Symbol('error');
+const EMITDATA = Symbol('emitData');
+const EMITEND = Symbol('emitEnd');
+const EMITEND2 = Symbol('emitEnd2');
+const ASYNC = Symbol('async');
+const ABORT = Symbol('abort');
+const ABORTED = Symbol('aborted');
+const SIGNAL = Symbol('signal');
+const DATALISTENERS = Symbol('dataListeners');
+const DISCARDED = Symbol('discarded');
+const defer = (fn) => Promise.resolve().then(fn);
+const nodefer = (fn) => fn();
+const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
+const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
+    (!!b &&
+        typeof b === 'object' &&
+        b.constructor &&
+        b.constructor.name === 'ArrayBuffer' &&
+        b.byteLength >= 0);
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
+/**
+ * Internal class representing a pipe to a destination stream.
+ *
+ * @internal
+ */
+class Pipe {
+    src;
+    dest;
+    opts;
+    ondrain;
+    constructor(src, dest, opts) {
+        this.src = src;
+        this.dest = dest;
+        this.opts = opts;
+        this.ondrain = () => src[RESUME]();
+        this.dest.on('drain', this.ondrain);
+    }
+    unpipe() {
+        this.dest.removeListener('drain', this.ondrain);
+    }
+    // only here for the prototype
+    /* c8 ignore start */
+    proxyErrors(_er) { }
+    /* c8 ignore stop */
+    end() {
+        this.unpipe();
+        if (this.opts.end)
+            this.dest.end();
+    }
+}
+/**
+ * Internal class representing a pipe to a destination stream where
+ * errors are proxied.
+ *
+ * @internal
+ */
+class PipeProxyErrors extends Pipe {
+    unpipe() {
+        this.src.removeListener('error', this.proxyErrors);
+        super.unpipe();
+    }
+    constructor(src, dest, opts) {
+        super(src, dest, opts);
+        this.proxyErrors = er => dest.emit('error', er);
+        src.on('error', this.proxyErrors);
+    }
+}
+const isObjectModeOptions = (o) => !!o.objectMode;
+const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
+/**
+ * Main export, the Minipass class
+ *
+ * `RType` is the type of data emitted, defaults to Buffer
+ *
+ * `WType` is the type of data to be written, if RType is buffer or string,
+ * then any {@link Minipass.ContiguousData} is allowed.
+ *
+ * `Events` is the set of event handler signatures that this object
+ * will emit, see {@link Minipass.Events}
+ */
+class Minipass extends events_1.EventEmitter {
+    [FLOWING] = false;
+    [PAUSED] = false;
+    [PIPES] = [];
+    [BUFFER] = [];
+    [OBJECTMODE];
+    [ENCODING];
+    [ASYNC];
+    [DECODER];
+    [EOF] = false;
+    [EMITTED_END] = false;
+    [EMITTING_END] = false;
+    [CLOSED] = false;
+    [EMITTED_ERROR] = null;
+    [BUFFERLENGTH] = 0;
+    [DESTROYED] = false;
+    [SIGNAL];
+    [ABORTED] = false;
+    [DATALISTENERS] = 0;
+    [DISCARDED] = false;
+    /**
+     * true if the stream can be written
+     */
+    writable = true;
+    /**
+     * true if the stream can be read
+     */
+    readable = true;
+    /**
+     * If `RType` is Buffer, then options do not need to be provided.
+     * Otherwise, an options object must be provided to specify either
+     * {@link Minipass.SharedOptions.objectMode} or
+     * {@link Minipass.SharedOptions.encoding}, as appropriate.
+     */
+    constructor(...args) {
+        const options = (args[0] ||
+            {});
+        super();
+        if (options.objectMode && typeof options.encoding === 'string') {
+            throw new TypeError('Encoding and objectMode may not be used together');
+        }
+        if (isObjectModeOptions(options)) {
+            this[OBJECTMODE] = true;
+            this[ENCODING] = null;
+        }
+        else if (isEncodingOptions(options)) {
+            this[ENCODING] = options.encoding;
+            this[OBJECTMODE] = false;
+        }
+        else {
+            this[OBJECTMODE] = false;
+            this[ENCODING] = null;
+        }
+        this[ASYNC] = !!options.async;
+        this[DECODER] = this[ENCODING]
+            ? new string_decoder_1.StringDecoder(this[ENCODING])
+            : null;
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposeBuffer === true) {
+            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+        }
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposePipes === true) {
+            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+        }
+        const { signal } = options;
+        if (signal) {
+            this[SIGNAL] = signal;
+            if (signal.aborted) {
+                this[ABORT]();
+            }
+            else {
+                signal.addEventListener('abort', () => this[ABORT]());
+            }
+        }
+    }
+    /**
+     * The amount of data stored in the buffer waiting to be read.
+     *
+     * For Buffer strings, this will be the total byte length.
+     * For string encoding streams, this will be the string character length,
+     * according to JavaScript's `string.length` logic.
+     * For objectMode streams, this is a count of the items waiting to be
+     * emitted.
+     */
+    get bufferLength() {
+        return this[BUFFERLENGTH];
+    }
+    /**
+     * The `BufferEncoding` currently in use, or `null`
+     */
+    get encoding() {
+        return this[ENCODING];
+    }
+    /**
+     * @deprecated - This is a read only property
+     */
+    set encoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * @deprecated - Encoding may only be set at instantiation time
+     */
+    setEncoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * True if this is an objectMode stream
+     */
+    get objectMode() {
+        return this[OBJECTMODE];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set objectMode(_om) {
+        throw new Error('objectMode must be set at instantiation time');
+    }
+    /**
+     * true if this is an async stream
+     */
+    get ['async']() {
+        return this[ASYNC];
+    }
+    /**
+     * Set to true to make this stream async.
+     *
+     * Once set, it cannot be unset, as this would potentially cause incorrect
+     * behavior.  I.e., a sync stream can be made async, but an async stream
+     * cannot be safely made sync.
+     */
+    set ['async'](a) {
+        this[ASYNC] = this[ASYNC] || !!a;
+    }
+    // drop everything and get out of the flow completely
+    [ABORT]() {
+        this[ABORTED] = true;
+        this.emit('abort', this[SIGNAL]?.reason);
+        this.destroy(this[SIGNAL]?.reason);
+    }
+    /**
+     * True if the stream has been aborted.
+     */
+    get aborted() {
+        return this[ABORTED];
+    }
+    /**
+     * No-op setter. Stream aborted status is set via the AbortSignal provided
+     * in the constructor options.
+     */
+    set aborted(_) { }
+    write(chunk, encoding, cb) {
+        if (this[ABORTED])
+            return false;
+        if (this[EOF])
+            throw new Error('write after end');
+        if (this[DESTROYED]) {
+            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+            return true;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (!encoding)
+            encoding = 'utf8';
+        const fn = this[ASYNC] ? defer : nodefer;
+        // convert array buffers and typed array views into buffers
+        // at some point in the future, we may want to do the opposite!
+        // leave strings and buffers as-is
+        // anything is only allowed if in object mode, so throw
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance, fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // if it's empty, don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
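+    /* Usage sketch (illustrative; not part of the upstream source): write()
+     * returns the current flowing state, so backpressure is visible to the
+     * caller directly:
+     *
+     *   const mp = new Minipass()
+     *   const flowing = mp.write(Buffer.from('abc'), () => {})
+     *   // flowing === false until a consumer (pipe or 'data' handler) attaches
+     */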
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater than the amount of data in the internal buffer,
+     * then `null` is returned.
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ? this[BUFFER].join('')
+                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+            ];
+        }
+        const ret = this[READ](n || null, this[BUFFER][0]);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
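+    /* Usage sketch (illustrative): read(n) never returns a short read; if n
+     * exceeds the buffered data it returns null instead:
+     *
+     *   const mp = new Minipass()
+     *   mp.write(Buffer.from('abcd'))
+     *   mp.read(2)  // => <Buffer 61 62>
+     *   mp.read(99) // => null, only 2 bytes remain buffered
+     */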
+    [READ](n, chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERSHIFT]();
+        else {
+            const c = chunk;
+            if (n === c.length || n === null)
+                this[BUFFERSHIFT]();
+            else if (typeof c === 'string') {
+                this[BUFFER][0] = c.slice(n);
+                chunk = c.slice(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+            else {
+                this[BUFFER][0] = c.subarray(n);
+                chunk = c.subarray(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+        }
+        this.emit('data', chunk);
+        if (!this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+        return chunk;
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (chunk !== undefined)
+            this.write(chunk, encoding);
+        if (cb)
+            this.once('end', cb);
+        this[EOF] = true;
+        this.writable = false;
+        // if we haven't written anything, then go ahead and emit,
+        // even if we're not reading.
+        // we'll re-emit if a new 'end' listener is added anyway.
+        // This makes Minipass more suitable for write-only use cases.
+        if (this[FLOWING] || !this[PAUSED])
+            this[MAYBE_EMIT_END]();
+        return this;
+    }
+    // don't let the internal resume be overwritten
+    [RESUME]() {
+        if (this[DESTROYED])
+            return;
+        if (!this[DATALISTENERS] && !this[PIPES].length) {
+            this[DISCARDED] = true;
+        }
+        this[PAUSED] = false;
+        this[FLOWING] = true;
+        this.emit('resume');
+        if (this[BUFFER].length)
+            this[FLUSH]();
+        else if (this[EOF])
+            this[MAYBE_EMIT_END]();
+        else
+            this.emit('drain');
+    }
+    /**
+     * Resume the stream if it is currently in a paused state
+     *
+     * If called when there are no pipe destinations or `data` event listeners,
+     * this will place the stream in a "discarded" state, where all data will
+     * be thrown away. The discarded state is removed if a pipe destination or
+     * data handler is added, if pause() is called, or if any synchronous or
+     * asynchronous iteration is started.
+     */
+    resume() {
+        return this[RESUME]();
+    }
+    /**
+     * Pause the stream
+     */
+    pause() {
+        this[FLOWING] = false;
+        this[PAUSED] = true;
+        this[DISCARDED] = false;
+    }
+    /**
+     * true if the stream has been forcibly destroyed
+     */
+    get destroyed() {
+        return this[DESTROYED];
+    }
+    /**
+     * true if the stream is currently in a flowing state, meaning that
+     * any writes will be immediately emitted.
+     */
+    get flowing() {
+        return this[FLOWING];
+    }
+    /**
+     * true if the stream is currently in a paused state
+     */
+    get paused() {
+        return this[PAUSED];
+    }
+    [BUFFERPUSH](chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] += 1;
+        else
+            this[BUFFERLENGTH] += chunk.length;
+        this[BUFFER].push(chunk);
+    }
+    [BUFFERSHIFT]() {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] -= 1;
+        else
+            this[BUFFERLENGTH] -= this[BUFFER][0].length;
+        return this[BUFFER].shift();
+    }
+    [FLUSH](noDrain = false) {
+        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
+            this[BUFFER].length);
+        if (!noDrain && !this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+    }
+    [FLUSHCHUNK](chunk) {
+        this.emit('data', chunk);
+        return this[FLOWING];
+    }
+    /**
+     * Pipe all data emitted by this stream into the destination provided.
+     *
+     * Triggers the flow of data.
+     */
+    pipe(dest, opts) {
+        if (this[DESTROYED])
+            return dest;
+        this[DISCARDED] = false;
+        const ended = this[EMITTED_END];
+        opts = opts || {};
+        if (dest === proc.stdout || dest === proc.stderr)
+            opts.end = false;
+        else
+            opts.end = opts.end !== false;
+        opts.proxyErrors = !!opts.proxyErrors;
+        // piping an ended stream ends immediately
+        if (ended) {
+            if (opts.end)
+                dest.end();
+        }
+        else {
+            // "as" here just ignores the WType, which pipes don't care about,
+            // since they're only consuming from us, and writing to the dest
+            this[PIPES].push(!opts.proxyErrors
+                ? new Pipe(this, dest, opts)
+                : new PipeProxyErrors(this, dest, opts));
+            if (this[ASYNC])
+                defer(() => this[RESUME]());
+            else
+                this[RESUME]();
+        }
+        return dest;
+    }
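+    /* Usage sketch (illustrative): piping starts the flow, and stdio
+     * destinations are never ended automatically:
+     *
+     *   const mp = new Minipass()
+     *   mp.pipe(process.stdout) // opts.end is forced to false for stdio
+     *   mp.end('hello\n')
+     */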
+    /**
+     * Fully unhook a piped destination stream.
+     *
+     * If the destination stream was the only consumer of this stream (ie,
+     * there are no other piped destinations or `'data'` event listeners)
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    unpipe(dest) {
+        const p = this[PIPES].find(p => p.dest === dest);
+        if (p) {
+            if (this[PIPES].length === 1) {
+                if (this[FLOWING] && this[DATALISTENERS] === 0) {
+                    this[FLOWING] = false;
+                }
+                this[PIPES] = [];
+            }
+            else
+                this[PIPES].splice(this[PIPES].indexOf(p), 1);
+            p.unpipe();
+        }
+    }
+    /**
+     * Alias for {@link Minipass#on}
+     */
+    addListener(ev, handler) {
+        return this.on(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.on`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * - Adding a 'data' event handler will trigger the flow of data
+     *
+     * - Adding a 'readable' event handler when there is data waiting to be read
+     *   will cause 'readable' to be emitted immediately.
+     *
+     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
+     *   already passed will cause the event to be emitted immediately and all
+     *   handlers removed.
+     *
+     * - Adding an 'error' event handler after an error has been emitted will
+     *   cause the event to be re-emitted immediately with the error previously
+     *   raised.
+     */
+    on(ev, handler) {
+        const ret = super.on(ev, handler);
+        if (ev === 'data') {
+            this[DISCARDED] = false;
+            this[DATALISTENERS]++;
+            if (!this[PIPES].length && !this[FLOWING]) {
+                this[RESUME]();
+            }
+        }
+        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
+            super.emit('readable');
+        }
+        else if (isEndish(ev) && this[EMITTED_END]) {
+            super.emit(ev);
+            this.removeAllListeners(ev);
+        }
+        else if (ev === 'error' && this[EMITTED_ERROR]) {
+            const h = handler;
+            if (this[ASYNC])
+                defer(() => h.call(this, this[EMITTED_ERROR]));
+            else
+                h.call(this, this[EMITTED_ERROR]);
+        }
+        return ret;
+    }
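+    /* Usage sketch (illustrative): attaching a 'data' handler resumes the
+     * stream, so a late listener still receives buffered chunks:
+     *
+     *   const mp = new Minipass({ encoding: 'utf8' })
+     *   mp.write('queued')
+     *   mp.on('data', c => console.log(c)) // logs 'queued' synchronously
+     */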
+    /**
+     * Alias for {@link Minipass#off}
+     */
+    removeListener(ev, handler) {
+        return this.off(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.off`
+     *
+     * If a 'data' event handler is removed, and it was the last consumer
+     * (ie, there are no pipe destinations or other 'data' event listeners),
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    off(ev, handler) {
+        const ret = super.off(ev, handler);
+        // if we previously had listeners, and now we don't, and we don't
+        // have any pipes, then stop the flow, unless it's been explicitly
+        // put in a discarded flowing state via stream.resume().
+        if (ev === 'data') {
+            this[DATALISTENERS] = this.listeners('data').length;
+            if (this[DATALISTENERS] === 0 &&
+                !this[DISCARDED] &&
+                !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * Mostly identical to `EventEmitter.removeAllListeners`
+     *
+     * If all 'data' event handlers are removed, and they were the last consumer
+     * (ie, there are no pipe destinations), then the flow of data will stop
+     * until there is another consumer or {@link Minipass#resume} is explicitly
+     * called.
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ? super.emit('error', data)
+                : false;
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'resume') {
+            const ret = super.emit('resume');
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'finish' || ev === 'prefinish') {
+            const ret = super.emit(ev);
+            this.removeAllListeners(ev);
+            return ret;
+        }
+        // Some other unknown event
+        const ret = super.emit(ev, ...args);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITDATA](data) {
+        for (const p of this[PIPES]) {
+            if (p.dest.write(data) === false)
+                this.pause();
+        }
+        const ret = this[DISCARDED] ? false : super.emit('data', data);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITEND]() {
+        if (this[EMITTED_END])
+            return false;
+        this[EMITTED_END] = true;
+        this.readable = false;
+        return this[ASYNC]
+            ? (defer(() => this[EMITEND2]()), true)
+            : this[EMITEND2]();
+    }
+    [EMITEND2]() {
+        if (this[DECODER]) {
+            const data = this[DECODER].end();
+            if (data) {
+                for (const p of this[PIPES]) {
+                    p.dest.write(data);
+                }
+                if (!this[DISCARDED])
+                    super.emit('data', data);
+            }
+        }
+        for (const p of this[PIPES]) {
+            p.end();
+        }
+        const ret = super.emit('end');
+        this.removeAllListeners('end');
+        return ret;
+    }
+    /**
+     * Return a Promise that resolves to an array of all emitted data once
+     * the stream ends.
+     */
+    async collect() {
+        const buf = Object.assign([], {
+            dataLength: 0,
+        });
+        if (!this[OBJECTMODE])
+            buf.dataLength = 0;
+        // set the promise first, in case an error is raised
+        // by triggering the flow here.
+        const p = this.promise();
+        this.on('data', c => {
+            buf.push(c);
+            if (!this[OBJECTMODE])
+                buf.dataLength += c.length;
+        });
+        await p;
+        return buf;
+    }
+    /**
+     * Return a Promise that resolves to the concatenation of all emitted data
+     * once the stream ends.
+     *
+     * Not allowed on objectMode streams.
+     */
+    async concat() {
+        if (this[OBJECTMODE]) {
+            throw new Error('cannot concat in objectMode');
+        }
+        const buf = await this.collect();
+        return (this[ENCODING]
+            ? buf.join('')
+            : Buffer.concat(buf, buf.dataLength));
+    }
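+    /* Usage sketch (illustrative): collect() resolves to an array of chunks,
+     * concat() to a single joined value (a string when an encoding is set):
+     *
+     *   const mp = new Minipass({ encoding: 'utf8' })
+     *   mp.end('abc')
+     *   mp.concat().then(s => console.log(s)) // 'abc'
+     */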
+    /**
+     * Return a void Promise that resolves once the stream ends.
+     */
+    async promise() {
+        return new Promise((resolve, reject) => {
+            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
+            this.on('error', er => reject(er));
+            this.on('end', () => resolve());
+        });
+    }
+    /**
+     * Asynchronous `for await of` iteration.
+     *
+     * This will continue emitting all chunks until the stream terminates.
+     */
+    [Symbol.asyncIterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = async () => {
+            this.pause();
+            stopped = true;
+            return { value: undefined, done: true };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const res = this.read();
+            if (res !== null)
+                return Promise.resolve({ done: false, value: res });
+            if (this[EOF])
+                return stop();
+            let resolve;
+            let reject;
+            const onerr = (er) => {
+                this.off('data', ondata);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                reject(er);
+            };
+            const ondata = (value) => {
+                this.off('error', onerr);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                this.pause();
+                resolve({ value, done: !!this[EOF] });
+            };
+            const onend = () => {
+                this.off('error', onerr);
+                this.off('data', ondata);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                resolve({ done: true, value: undefined });
+            };
+            const ondestroy = () => onerr(new Error('stream destroyed'));
+            return new Promise((res, rej) => {
+                reject = rej;
+                resolve = res;
+                this.once(DESTROYED, ondestroy);
+                this.once('error', onerr);
+                this.once('end', onend);
+                this.once('data', ondata);
+            });
+        };
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+        };
+    }
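+    /* Usage sketch (illustrative; inside an async function): async iteration
+     * consumes items as they arrive and finishes when the stream ends:
+     *
+     *   const mp = new Minipass({ objectMode: true })
+     *   setImmediate(() => { mp.write(1); mp.end(2) })
+     *   for await (const item of mp) console.log(item) // 1, then 2
+     */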
+    /**
+     * Synchronous `for of` iteration.
+     *
+     * The iteration will terminate when the internal buffer runs out, even
+     * if the stream has not yet terminated.
+     */
+    [Symbol.iterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = () => {
+            this.pause();
+            this.off(ERROR, stop);
+            this.off(DESTROYED, stop);
+            this.off('end', stop);
+            stopped = true;
+            return { done: true, value: undefined };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const value = this.read();
+            return value === null ? stop() : { done: false, value };
+        };
+        this.once('end', stop);
+        this.once(ERROR, stop);
+        this.once(DESTROYED, stop);
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.iterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Destroy a stream, preventing it from being used for any further purpose.
+     *
+     * If the stream has a `close()` method, then it will be called on
+     * destruction.
+     *
+     * After destruction, any attempt to write data, read data, or emit most
+     * events will be ignored.
+     *
+     * If an error argument is provided, then it will be emitted in an
+     * 'error' event.
+     */
+    destroy(er) {
+        if (this[DESTROYED]) {
+            if (er)
+                this.emit('error', er);
+            else
+                this.emit(DESTROYED);
+            return this;
+        }
+        this[DESTROYED] = true;
+        this[DISCARDED] = true;
+        // throw away all buffered data, it's never coming out
+        this[BUFFER].length = 0;
+        this[BUFFERLENGTH] = 0;
+        const wc = this;
+        if (typeof wc.close === 'function' && !this[CLOSED])
+            wc.close();
+        if (er)
+            this.emit('error', er);
+        // if no error to emit, still reject pending promises
+        else
+            this.emit(DESTROYED);
+        return this;
+    }
+    /**
+     * Alias for {@link isStream}
+     *
+     * Former export location, maintained for backwards compatibility.
+     *
+     * @deprecated
+     */
+    static get isStream() {
+        return exports.isStream;
+    }
+}
+exports.Minipass = Minipass;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/index.js b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
new file mode 100644
index 0000000000000..b65fafbae43a4
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
@@ -0,0 +1,1018 @@
+const proc = typeof process === 'object' && process
+    ? process
+    : {
+        stdout: null,
+        stderr: null,
+    };
+import { EventEmitter } from 'events';
+import Stream from 'stream';
+import { StringDecoder } from 'string_decoder';
+/**
+ * Return true if the argument is a Minipass stream, Node stream, or something
+ * else that Minipass can interact with.
+ */
+export const isStream = (s) => !!s &&
+    typeof s === 'object' &&
+    (s instanceof Minipass ||
+        s instanceof Stream ||
+        isReadable(s) ||
+        isWritable(s));
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+export const isReadable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof EventEmitter &&
+    typeof s.pipe === 'function' &&
+    // node core Writable streams have a pipe() method, but it throws
+    s.pipe !== Stream.Writable.prototype.pipe;
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+export const isWritable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof EventEmitter &&
+    typeof s.write === 'function' &&
+    typeof s.end === 'function';
+const EOF = Symbol('EOF');
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
+const EMITTED_END = Symbol('emittedEnd');
+const EMITTING_END = Symbol('emittingEnd');
+const EMITTED_ERROR = Symbol('emittedError');
+const CLOSED = Symbol('closed');
+const READ = Symbol('read');
+const FLUSH = Symbol('flush');
+const FLUSHCHUNK = Symbol('flushChunk');
+const ENCODING = Symbol('encoding');
+const DECODER = Symbol('decoder');
+const FLOWING = Symbol('flowing');
+const PAUSED = Symbol('paused');
+const RESUME = Symbol('resume');
+const BUFFER = Symbol('buffer');
+const PIPES = Symbol('pipes');
+const BUFFERLENGTH = Symbol('bufferLength');
+const BUFFERPUSH = Symbol('bufferPush');
+const BUFFERSHIFT = Symbol('bufferShift');
+const OBJECTMODE = Symbol('objectMode');
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed');
+// internal event when stream has an error
+const ERROR = Symbol('error');
+const EMITDATA = Symbol('emitData');
+const EMITEND = Symbol('emitEnd');
+const EMITEND2 = Symbol('emitEnd2');
+const ASYNC = Symbol('async');
+const ABORT = Symbol('abort');
+const ABORTED = Symbol('aborted');
+const SIGNAL = Symbol('signal');
+const DATALISTENERS = Symbol('dataListeners');
+const DISCARDED = Symbol('discarded');
+const defer = (fn) => Promise.resolve().then(fn);
+const nodefer = (fn) => fn();
+const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
+const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
+    (!!b &&
+        typeof b === 'object' &&
+        b.constructor &&
+        b.constructor.name === 'ArrayBuffer' &&
+        b.byteLength >= 0);
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
+/**
+ * Internal class representing a pipe to a destination stream.
+ *
+ * @internal
+ */
+class Pipe {
+    src;
+    dest;
+    opts;
+    ondrain;
+    constructor(src, dest, opts) {
+        this.src = src;
+        this.dest = dest;
+        this.opts = opts;
+        this.ondrain = () => src[RESUME]();
+        this.dest.on('drain', this.ondrain);
+    }
+    unpipe() {
+        this.dest.removeListener('drain', this.ondrain);
+    }
+    // only here for the prototype
+    /* c8 ignore start */
+    proxyErrors(_er) { }
+    /* c8 ignore stop */
+    end() {
+        this.unpipe();
+        if (this.opts.end)
+            this.dest.end();
+    }
+}
+/**
+ * Internal class representing a pipe to a destination stream where
+ * errors are proxied.
+ *
+ * @internal
+ */
+class PipeProxyErrors extends Pipe {
+    unpipe() {
+        this.src.removeListener('error', this.proxyErrors);
+        super.unpipe();
+    }
+    constructor(src, dest, opts) {
+        super(src, dest, opts);
+        this.proxyErrors = er => dest.emit('error', er);
+        src.on('error', this.proxyErrors);
+    }
+}
+const isObjectModeOptions = (o) => !!o.objectMode;
+const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
+/**
+ * Main export, the Minipass class
+ *
+ * `RType` is the type of data emitted, defaults to Buffer
+ *
+ * `WType` is the type of data to be written, if RType is buffer or string,
+ * then any {@link Minipass.ContiguousData} is allowed.
+ *
+ * `Events` is the set of event handler signatures that this object
+ * will emit, see {@link Minipass.Events}
+ */
+export class Minipass extends EventEmitter {
+    [FLOWING] = false;
+    [PAUSED] = false;
+    [PIPES] = [];
+    [BUFFER] = [];
+    [OBJECTMODE];
+    [ENCODING];
+    [ASYNC];
+    [DECODER];
+    [EOF] = false;
+    [EMITTED_END] = false;
+    [EMITTING_END] = false;
+    [CLOSED] = false;
+    [EMITTED_ERROR] = null;
+    [BUFFERLENGTH] = 0;
+    [DESTROYED] = false;
+    [SIGNAL];
+    [ABORTED] = false;
+    [DATALISTENERS] = 0;
+    [DISCARDED] = false;
+    /**
+     * true if the stream can be written
+     */
+    writable = true;
+    /**
+     * true if the stream can be read
+     */
+    readable = true;
+    /**
+     * If `RType` is Buffer, then options do not need to be provided.
+     * Otherwise, an options object must be provided to specify either
+     * {@link Minipass.SharedOptions.objectMode} or
+     * {@link Minipass.SharedOptions.encoding}, as appropriate.
+     */
+    constructor(...args) {
+        const options = (args[0] ||
+            {});
+        super();
+        if (options.objectMode && typeof options.encoding === 'string') {
+            throw new TypeError('Encoding and objectMode may not be used together');
+        }
+        if (isObjectModeOptions(options)) {
+            this[OBJECTMODE] = true;
+            this[ENCODING] = null;
+        }
+        else if (isEncodingOptions(options)) {
+            this[ENCODING] = options.encoding;
+            this[OBJECTMODE] = false;
+        }
+        else {
+            this[OBJECTMODE] = false;
+            this[ENCODING] = null;
+        }
+        this[ASYNC] = !!options.async;
+        this[DECODER] = this[ENCODING]
+            ? new StringDecoder(this[ENCODING])
+            : null;
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposeBuffer === true) {
+            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+        }
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposePipes === true) {
+            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+        }
+        const { signal } = options;
+        if (signal) {
+            this[SIGNAL] = signal;
+            if (signal.aborted) {
+                this[ABORT]();
+            }
+            else {
+                signal.addEventListener('abort', () => this[ABORT]());
+            }
+        }
+    }
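+    /* Usage sketch (illustrative): all mode options are fixed at
+     * construction, and objectMode and encoding are mutually exclusive:
+     *
+     *   new Minipass()                     // emits Buffer chunks
+     *   new Minipass({ encoding: 'utf8' }) // emits string chunks
+     *   new Minipass({ objectMode: true }) // emits arbitrary items
+     *   new Minipass({ objectMode: true, encoding: 'utf8' }) // TypeError
+     */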
+    /**
+     * The amount of data stored in the buffer waiting to be read.
+     *
+     * For Buffer streams, this will be the total byte length.
+     * For string encoding streams, this will be the string character length,
+     * according to JavaScript's `string.length` logic.
+     * For objectMode streams, this is a count of the items waiting to be
+     * emitted.
+     */
+    get bufferLength() {
+        return this[BUFFERLENGTH];
+    }
+    /**
+     * The `BufferEncoding` currently in use, or `null`
+     */
+    get encoding() {
+        return this[ENCODING];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set encoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * @deprecated - Encoding may only be set at instantiation time
+     */
+    setEncoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * True if this is an objectMode stream
+     */
+    get objectMode() {
+        return this[OBJECTMODE];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set objectMode(_om) {
+        throw new Error('objectMode must be set at instantiation time');
+    }
+    /**
+     * true if this is an async stream
+     */
+    get ['async']() {
+        return this[ASYNC];
+    }
+    /**
+     * Set to true to make this stream async.
+     *
+     * Once set, it cannot be unset, as this would potentially cause incorrect
+     * behavior. I.e., a sync stream can be made async, but an async stream
+     * cannot be safely made sync.
+     */
+    set ['async'](a) {
+        this[ASYNC] = this[ASYNC] || !!a;
+    }
+    // drop everything and get out of the flow completely
+    [ABORT]() {
+        this[ABORTED] = true;
+        this.emit('abort', this[SIGNAL]?.reason);
+        this.destroy(this[SIGNAL]?.reason);
+    }
+    /**
+     * True if the stream has been aborted.
+     */
+    get aborted() {
+        return this[ABORTED];
+    }
+    /**
+     * No-op setter. Stream aborted status is set via the AbortSignal provided
+     * in the constructor options.
+     */
+    set aborted(_) { }
+    write(chunk, encoding, cb) {
+        if (this[ABORTED])
+            return false;
+        if (this[EOF])
+            throw new Error('write after end');
+        if (this[DESTROYED]) {
+            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+            return true;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (!encoding)
+            encoding = 'utf8';
+        const fn = this[ASYNC] ? defer : nodefer;
+        // convert array buffers and typed array views into buffers
+        // at some point in the future, we may want to do the opposite!
+        // leave strings and buffers as-is
+        // anything else is only allowed in object mode, so throw otherwise
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance, fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // if it's empty, don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater than the amount of data in the internal buffer,
+     * then `null` is returned.
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ? this[BUFFER].join('')
+                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+            ];
+        }
+        const ret = this[READ](n || null, this[BUFFER][0]);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [READ](n, chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERSHIFT]();
+        else {
+            const c = chunk;
+            if (n === c.length || n === null)
+                this[BUFFERSHIFT]();
+            else if (typeof c === 'string') {
+                this[BUFFER][0] = c.slice(n);
+                chunk = c.slice(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+            else {
+                this[BUFFER][0] = c.subarray(n);
+                chunk = c.subarray(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+        }
+        this.emit('data', chunk);
+        if (!this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+        return chunk;
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (chunk !== undefined)
+            this.write(chunk, encoding);
+        if (cb)
+            this.once('end', cb);
+        this[EOF] = true;
+        this.writable = false;
+        // if we haven't written anything, then go ahead and emit,
+        // even if we're not reading.
+        // we'll re-emit if a new 'end' listener is added anyway.
+        // This makes Minipass more suitable for write-only use cases.
+        if (this[FLOWING] || !this[PAUSED])
+            this[MAYBE_EMIT_END]();
+        return this;
+    }
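+    /* Usage sketch (illustrative): end() takes an optional final chunk and
+     * an 'end' callback, mirroring node core writables:
+     *
+     *   const mp = new Minipass({ encoding: 'utf8' })
+     *   mp.end('bye', () => console.log('ended'))
+     *   mp.resume() // no consumers, so data is discarded and 'end' fires
+     */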
+    // don't let the internal resume be overwritten
+    [RESUME]() {
+        if (this[DESTROYED])
+            return;
+        if (!this[DATALISTENERS] && !this[PIPES].length) {
+            this[DISCARDED] = true;
+        }
+        this[PAUSED] = false;
+        this[FLOWING] = true;
+        this.emit('resume');
+        if (this[BUFFER].length)
+            this[FLUSH]();
+        else if (this[EOF])
+            this[MAYBE_EMIT_END]();
+        else
+            this.emit('drain');
+    }
+    /**
+     * Resume the stream if it is currently in a paused state
+     *
+     * If called when there are no pipe destinations or `data` event listeners,
+     * this will place the stream in a "discarded" state, where all data will
+     * be thrown away. The discarded state is removed if a pipe destination or
+     * data handler is added, if pause() is called, or if any synchronous or
+     * asynchronous iteration is started.
+     */
+    resume() {
+        return this[RESUME]();
+    }
+    /**
+     * Pause the stream
+     */
+    pause() {
+        this[FLOWING] = false;
+        this[PAUSED] = true;
+        this[DISCARDED] = false;
+    }
+    /**
+     * true if the stream has been forcibly destroyed
+     */
+    get destroyed() {
+        return this[DESTROYED];
+    }
+    /**
+     * true if the stream is currently in a flowing state, meaning that
+     * any writes will be immediately emitted.
+     */
+    get flowing() {
+        return this[FLOWING];
+    }
+    /**
+     * true if the stream is currently in a paused state
+     */
+    get paused() {
+        return this[PAUSED];
+    }
+    [BUFFERPUSH](chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] += 1;
+        else
+            this[BUFFERLENGTH] += chunk.length;
+        this[BUFFER].push(chunk);
+    }
+    [BUFFERSHIFT]() {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] -= 1;
+        else
+            this[BUFFERLENGTH] -= this[BUFFER][0].length;
+        return this[BUFFER].shift();
+    }
+    [FLUSH](noDrain = false) {
+        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
+            this[BUFFER].length);
+        if (!noDrain && !this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+    }
+    [FLUSHCHUNK](chunk) {
+        this.emit('data', chunk);
+        return this[FLOWING];
+    }
+    /**
+     * Pipe all data emitted by this stream into the destination provided.
+     *
+     * Triggers the flow of data.
+     */
+    pipe(dest, opts) {
+        if (this[DESTROYED])
+            return dest;
+        this[DISCARDED] = false;
+        const ended = this[EMITTED_END];
+        opts = opts || {};
+        if (dest === proc.stdout || dest === proc.stderr)
+            opts.end = false;
+        else
+            opts.end = opts.end !== false;
+        opts.proxyErrors = !!opts.proxyErrors;
+        // piping an ended stream ends immediately
+        if (ended) {
+            if (opts.end)
+                dest.end();
+        }
+        else {
+            // "as" here just ignores the WType, which pipes don't care about,
+            // since they're only consuming from us, and writing to the dest
+            this[PIPES].push(!opts.proxyErrors
+                ? new Pipe(this, dest, opts)
+                : new PipeProxyErrors(this, dest, opts));
+            if (this[ASYNC])
+                defer(() => this[RESUME]());
+            else
+                this[RESUME]();
+        }
+        return dest;
+    }
+    /**
+     * Fully unhook a piped destination stream.
+     *
+     * If the destination stream was the only consumer of this stream (ie,
+     * there are no other piped destinations or `'data'` event listeners)
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    unpipe(dest) {
+        const p = this[PIPES].find(p => p.dest === dest);
+        if (p) {
+            if (this[PIPES].length === 1) {
+                if (this[FLOWING] && this[DATALISTENERS] === 0) {
+                    this[FLOWING] = false;
+                }
+                this[PIPES] = [];
+            }
+            else
+                this[PIPES].splice(this[PIPES].indexOf(p), 1);
+            p.unpipe();
+        }
+    }
+    /**
+     * Alias for {@link Minipass#on}
+     */
+    addListener(ev, handler) {
+        return this.on(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.on`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * - Adding a 'data' event handler will trigger the flow of data
+     *
+     * - Adding a 'readable' event handler when there is data waiting to be read
+     *   will cause 'readable' to be emitted immediately.
+     *
+     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
+     *   already passed will cause the event to be emitted immediately and all
+     *   handlers removed.
+     *
+     * - Adding an 'error' event handler after an error has been emitted will
+     *   cause the event to be re-emitted immediately with the error previously
+     *   raised.
+     */
+    on(ev, handler) {
+        const ret = super.on(ev, handler);
+        if (ev === 'data') {
+            this[DISCARDED] = false;
+            this[DATALISTENERS]++;
+            if (!this[PIPES].length && !this[FLOWING]) {
+                this[RESUME]();
+            }
+        }
+        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
+            super.emit('readable');
+        }
+        else if (isEndish(ev) && this[EMITTED_END]) {
+            super.emit(ev);
+            this.removeAllListeners(ev);
+        }
+        else if (ev === 'error' && this[EMITTED_ERROR]) {
+            const h = handler;
+            if (this[ASYNC])
+                defer(() => h.call(this, this[EMITTED_ERROR]));
+            else
+                h.call(this, this[EMITTED_ERROR]);
+        }
+        return ret;
+    }
+    /**
+     * Alias for {@link Minipass#off}
+     */
+    removeListener(ev, handler) {
+        return this.off(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.off`
+     *
+     * If a 'data' event handler is removed, and it was the last consumer
+     * (ie, there are no pipe destinations or other 'data' event listeners),
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    off(ev, handler) {
+        const ret = super.off(ev, handler);
+        // if we previously had listeners, and now we don't, and we don't
+        // have any pipes, then stop the flow, unless it's been explicitly
+        // put in a discarded flowing state via stream.resume().
+        if (ev === 'data') {
+            this[DATALISTENERS] = this.listeners('data').length;
+            if (this[DATALISTENERS] === 0 &&
+                !this[DISCARDED] &&
+                !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * Mostly identical to `EventEmitter.removeAllListeners`
+     *
+     * If all 'data' event handlers are removed, and they were the last consumer
+     * (ie, there are no pipe destinations), then the flow of data will stop
+     * until there is another consumer or {@link Minipass#resume} is explicitly
+     * called.
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ? super.emit('error', data)
+                : false;
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'resume') {
+            const ret = super.emit('resume');
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'finish' || ev === 'prefinish') {
+            const ret = super.emit(ev);
+            this.removeAllListeners(ev);
+            return ret;
+        }
+        // Some other unknown event
+        const ret = super.emit(ev, ...args);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITDATA](data) {
+        for (const p of this[PIPES]) {
+            if (p.dest.write(data) === false)
+                this.pause();
+        }
+        const ret = this[DISCARDED] ? false : super.emit('data', data);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITEND]() {
+        if (this[EMITTED_END])
+            return false;
+        this[EMITTED_END] = true;
+        this.readable = false;
+        return this[ASYNC]
+            ? (defer(() => this[EMITEND2]()), true)
+            : this[EMITEND2]();
+    }
+    [EMITEND2]() {
+        if (this[DECODER]) {
+            const data = this[DECODER].end();
+            if (data) {
+                for (const p of this[PIPES]) {
+                    p.dest.write(data);
+                }
+                if (!this[DISCARDED])
+                    super.emit('data', data);
+            }
+        }
+        for (const p of this[PIPES]) {
+            p.end();
+        }
+        const ret = super.emit('end');
+        this.removeAllListeners('end');
+        return ret;
+    }
+    /**
+     * Return a Promise that resolves to an array of all emitted data once
+     * the stream ends.
+     */
+    async collect() {
+        const buf = Object.assign([], {
+            dataLength: 0,
+        });
+        if (!this[OBJECTMODE])
+            buf.dataLength = 0;
+        // set the promise first, in case an error is raised
+        // by triggering the flow here.
+        const p = this.promise();
+        this.on('data', c => {
+            buf.push(c);
+            if (!this[OBJECTMODE])
+                buf.dataLength += c.length;
+        });
+        await p;
+        return buf;
+    }
+    /**
+     * Return a Promise that resolves to the concatenation of all emitted data
+     * once the stream ends.
+     *
+     * Not allowed on objectMode streams.
+     */
+    async concat() {
+        if (this[OBJECTMODE]) {
+            throw new Error('cannot concat in objectMode');
+        }
+        const buf = await this.collect();
+        return (this[ENCODING]
+            ? buf.join('')
+            : Buffer.concat(buf, buf.dataLength));
+    }
+    /**
+     * Return a void Promise that resolves once the stream ends.
+     */
+    async promise() {
+        return new Promise((resolve, reject) => {
+            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
+            this.on('error', er => reject(er));
+            this.on('end', () => resolve());
+        });
+    }
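+    /* Usage sketch (illustrative; `dest` is a hypothetical writable): await
+     * completion of a pipe via promise():
+     *
+     *   const mp = new Minipass()
+     *   mp.pipe(dest)
+     *   mp.end(Buffer.from('data'))
+     *   await mp.promise() // resolves on 'end', rejects on 'error' or destroy
+     */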
+    /**
+     * Asynchronous `for await of` iteration.
+     *
+     * This will continue emitting all chunks until the stream terminates.
+     */
+    [Symbol.asyncIterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = async () => {
+            this.pause();
+            stopped = true;
+            return { value: undefined, done: true };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const res = this.read();
+            if (res !== null)
+                return Promise.resolve({ done: false, value: res });
+            if (this[EOF])
+                return stop();
+            let resolve;
+            let reject;
+            const onerr = (er) => {
+                this.off('data', ondata);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                reject(er);
+            };
+            const ondata = (value) => {
+                this.off('error', onerr);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                this.pause();
+                resolve({ value, done: !!this[EOF] });
+            };
+            const onend = () => {
+                this.off('error', onerr);
+                this.off('data', ondata);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                resolve({ done: true, value: undefined });
+            };
+            const ondestroy = () => onerr(new Error('stream destroyed'));
+            return new Promise((res, rej) => {
+                reject = rej;
+                resolve = res;
+                this.once(DESTROYED, ondestroy);
+                this.once('error', onerr);
+                this.once('end', onend);
+                this.once('data', ondata);
+            });
+        };
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Synchronous `for of` iteration.
+     *
+     * The iteration will terminate when the internal buffer runs out, even
+     * if the stream has not yet terminated.
+     */
+    [Symbol.iterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = () => {
+            this.pause();
+            this.off(ERROR, stop);
+            this.off(DESTROYED, stop);
+            this.off('end', stop);
+            stopped = true;
+            return { done: true, value: undefined };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const value = this.read();
+            return value === null ? stop() : { done: false, value };
+        };
+        this.once('end', stop);
+        this.once(ERROR, stop);
+        this.once(DESTROYED, stop);
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.iterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Destroy a stream, preventing it from being used for any further purpose.
+     *
+     * If the stream has a `close()` method, then it will be called on
+     * destruction.
+     *
+     * After destruction, any attempt to write data, read data, or emit most
+     * events will be ignored.
+     *
+     * If an error argument is provided, then it will be emitted in an
+     * 'error' event.
+     */
+    destroy(er) {
+        if (this[DESTROYED]) {
+            if (er)
+                this.emit('error', er);
+            else
+                this.emit(DESTROYED);
+            return this;
+        }
+        this[DESTROYED] = true;
+        this[DISCARDED] = true;
+        // throw away all buffered data, it's never coming out
+        this[BUFFER].length = 0;
+        this[BUFFERLENGTH] = 0;
+        const wc = this;
+        if (typeof wc.close === 'function' && !this[CLOSED])
+            wc.close();
+        if (er)
+            this.emit('error', er);
+        // if no error to emit, still reject pending promises
+        else
+            this.emit(DESTROYED);
+        return this;
+    }
+    /**
+     * Alias for {@link isStream}
+     *
+     * Former export location, maintained for backwards compatibility.
+     *
+     * @deprecated
+     */
+    static get isStream() {
+        return isStream;
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
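
For context, the Symbol.asyncIterator wiring above is what lets consumers drain a minipass stream directly with for await; a minimal sketch, assuming minipass@7 is installed:

const { Minipass } = require('minipass')

async function demo () {
  const mp = new Minipass({ encoding: 'utf8' })
  mp.write('hello ')
  mp.write('world')
  mp.end()
  // yields whatever is buffered, then keeps waiting until 'end' or destroy()
  for await (const chunk of mp) {
    console.log(chunk) // 'hello world' (buffered chunks may be coalesced)
  }
}
demo()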
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/package.json b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/package.json
new file mode 100644
index 0000000000000..6faaa247a5bc6
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/node_modules/minipass/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "minipass",
+  "version": "7.0.3",
+  "description": "minimal implementation of a PassThrough stream",
+  "main": "./dist/cjs/index.js",
+  "module": "./dist/mjs/index.js",
+  "types": "./dist/cjs/index.js",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.js"
+      }
+    },
+    "./package.json": "./package.json"
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "preprepare": "rm -rf dist",
+    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "c8 tap",
+    "snap": "c8 tap",
+    "format": "prettier --write . --loglevel warn",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "tap": {
+    "coverage": false,
+    "node-arg": [
+      "--enable-source-maps",
+      "--no-warnings",
+      "--loader",
+      "ts-node/esm"
+    ],
+    "ts": false
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "devDependencies": {
+    "@types/node": "^20.1.2",
+    "@types/tap": "^15.0.8",
+    "c8": "^7.13.0",
+    "prettier": "^2.6.2",
+    "tap": "^16.3.0",
+    "ts-node": "^10.9.1",
+    "typedoc": "^0.24.8",
+    "typescript": "^5.1.3",
+    "end-of-stream": "^1.4.0",
+    "node-abort-controller": "^3.1.1",
+    "sync-content": "^1.0.2",
+    "through2": "^2.0.3"
+  },
+  "repository": "https://github.com/isaacs/minipass",
+  "keywords": [
+    "passthrough",
+    "stream"
+  ],
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC",
+  "engines": {
+    "node": ">=16 || 14 >=14.17"
+  }
+}
diff --git a/node_modules/node-gyp/node_modules/cacache/package.json b/node_modules/node-gyp/node_modules/cacache/package.json
new file mode 100644
index 0000000000000..ab58cb8b7c50f
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/cacache/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "cacache",
+  "version": "17.1.4",
+  "cache-version": {
+    "content": "2",
+    "index": "5"
+  },
+  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "coverage": "tap",
+    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
+    "lint": "eslint \"**/*.js\"",
+    "npmclilint": "npmcli-lint",
+    "lintfix": "npm run lint -- --fix",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "posttest": "npm run lint",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/cacache.git"
+  },
+  "keywords": [
+    "cache",
+    "caching",
+    "content-addressable",
+    "sri",
+    "sri hash",
+    "subresource integrity",
+    "cache",
+    "storage",
+    "store",
+    "file store",
+    "filesystem",
+    "disk cache",
+    "disk storage"
+  ],
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/fs": "^3.1.0",
+    "fs-minipass": "^3.0.0",
+    "glob": "^10.2.2",
+    "lru-cache": "^7.7.1",
+    "minipass": "^7.0.3",
+    "minipass-collect": "^1.0.2",
+    "minipass-flush": "^1.0.5",
+    "minipass-pipeline": "^1.2.4",
+    "p-map": "^4.0.0",
+    "ssri": "^10.0.0",
+    "tar": "^6.1.11",
+    "unique-filename": "^3.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "tap": "^16.0.0"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "windowsCI": false,
+    "version": "4.18.0",
+    "publish": "true"
+  },
+  "author": "GitHub Inc.",
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/node_modules/pacote/node_modules/cacache/LICENSE.md b/node_modules/pacote/node_modules/cacache/LICENSE.md
new file mode 100644
index 0000000000000..8d28acf866d93
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/LICENSE.md
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/path.js b/node_modules/pacote/node_modules/cacache/lib/content/path.js
new file mode 100644
index 0000000000000..ad5a76a4f73f2
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/content/path.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const contentVer = require('../../package.json')['cache-version'].content
+const hashToSegments = require('../util/hash-to-segments')
+const path = require('path')
+const ssri = require('ssri')
+
+// Current format of content file path:
+//
+// sha512-BaSE64Hex= ->
+// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
+//
+module.exports = contentPath
+
+function contentPath (cache, integrity) {
+  const sri = ssri.parse(integrity, { single: true })
+  // contentPath is the *strongest* algo given
+  return path.join(
+    contentDir(cache),
+    sri.algorithm,
+    ...hashToSegments(sri.hexDigest())
+  )
+}
+
+module.exports.contentDir = contentDir
+
+function contentDir (cache) {
+  return path.join(cache, `content-v${contentVer}`)
+}
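
To make the layout comment above concrete, a sketch (with a made-up digest) of how a parsed integrity value becomes the nested on-disk path:

const path = require('path')
// same segmenting as lib/util/hash-to-segments.js
const hashToSegments = (hash) => [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
const hex = 'bada55deadbeefc0ffee' // hypothetical sri.hexDigest() output
console.log(path.join('content-v2', 'sha512', ...hashToSegments(hex)))
// => content-v2/sha512/ba/da/55deadbeefc0ffee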
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/read.js b/node_modules/pacote/node_modules/cacache/lib/content/read.js
new file mode 100644
index 0000000000000..f41b539df65dc
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/content/read.js
@@ -0,0 +1,166 @@
+'use strict'
+
+const fs = require('fs/promises')
+const fsm = require('fs-minipass')
+const ssri = require('ssri')
+const contentPath = require('./path')
+const Pipeline = require('minipass-pipeline')
+
+module.exports = read
+
+const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
+async function read (cache, integrity, opts = {}) {
+  const { size } = opts
+  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+    // get size
+    const stat = await fs.stat(cpath)
+    return { stat, cpath, sri }
+  })
+  if (typeof size === 'number' && stat.size !== size) {
+    throw sizeError(size, stat.size)
+  }
+
+  if (stat.size > MAX_SINGLE_READ_SIZE) {
+    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
+  }
+
+  const data = await fs.readFile(cpath, { encoding: null })
+  if (!ssri.checkData(data, sri)) {
+    throw integrityError(sri, cpath)
+  }
+
+  return data
+}
+
+const readPipeline = (cpath, size, sri, stream) => {
+  stream.push(
+    new fsm.ReadStream(cpath, {
+      size,
+      readSize: MAX_SINGLE_READ_SIZE,
+    }),
+    ssri.integrityStream({
+      integrity: sri,
+      size,
+    })
+  )
+  return stream
+}
+
+module.exports.stream = readStream
+module.exports.readStream = readStream
+
+function readStream (cache, integrity, opts = {}) {
+  const { size } = opts
+  const stream = new Pipeline()
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+      // just stat to ensure it exists
+      const stat = await fs.stat(cpath)
+      return { stat, cpath, sri }
+    })
+    if (typeof size === 'number' && size !== stat.size) {
+      return stream.emit('error', sizeError(size, stat.size))
+    }
+
+    return readPipeline(cpath, stat.size, sri, stream)
+  }).catch(err => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.copy = copy
+
+function copy (cache, integrity, dest) {
+  return withContentSri(cache, integrity, (cpath, sri) => {
+    return fs.copyFile(cpath, dest)
+  })
+}
+
+module.exports.hasContent = hasContent
+
+async function hasContent (cache, integrity) {
+  if (!integrity) {
+    return false
+  }
+
+  try {
+    return await withContentSri(cache, integrity, async (cpath, sri) => {
+      const stat = await fs.stat(cpath)
+      return { size: stat.size, sri, stat }
+    })
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return false
+    }
+
+    if (err.code === 'EPERM') {
+      /* istanbul ignore else */
+      if (process.platform !== 'win32') {
+        throw err
+      } else {
+        return false
+      }
+    }
+  }
+}
+
+async function withContentSri (cache, integrity, fn) {
+  const sri = ssri.parse(integrity)
+  // If `integrity` has multiple entries, pick the first digest
+  // with available local data.
+  const algo = sri.pickAlgorithm()
+  const digests = sri[algo]
+
+  if (digests.length <= 1) {
+    const cpath = contentPath(cache, digests[0])
+    return fn(cpath, digests[0])
+  } else {
+    // Can't use race here because a generic error can happen before
+    // a ENOENT error, and can happen before a valid result
+    const results = await Promise.all(digests.map(async (meta) => {
+      try {
+        return await withContentSri(cache, meta, fn)
+      } catch (err) {
+        if (err.code === 'ENOENT') {
+          return Object.assign(
+            new Error('No matching content found for ' + sri.toString()),
+            { code: 'ENOENT' }
+          )
+        }
+        return err
+      }
+    }))
+    // Return the first non error if it is found
+    const result = results.find((r) => !(r instanceof Error))
+    if (result) {
+      return result
+    }
+
+    // Throw the No matching content found error
+    const enoentError = results.find((r) => r.code === 'ENOENT')
+    if (enoentError) {
+      throw enoentError
+    }
+
+    // Throw generic error
+    throw results.find((r) => r instanceof Error)
+  }
+}
+
+function sizeError (expected, found) {
+  /* eslint-disable-next-line max-len */
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function integrityError (sri, path) {
+  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
+  err.code = 'EINTEGRITY'
+  err.sri = sri
+  err.path = path
+  return err
+}
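
From the consumer side this module is reached through cacache's public get API; a usage sketch with a hypothetical cache path and key:

const cacache = require('cacache')

async function demo () {
  const { data, integrity, size } = await cacache.get('/tmp/my-cache', 'my-key')
  // read() verified `data` against `integrity`; mismatches reject with
  // code EINTEGRITY, and a wrong opts.size rejects with EBADSIZE
  console.log(size, integrity, data.length)
  // hasContent() stats the blob for a digest without reading it
  const info = await cacache.get.hasContent('/tmp/my-cache', integrity)
  console.log(Boolean(info))
}
demo().catch(console.error)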
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/rm.js b/node_modules/pacote/node_modules/cacache/lib/content/rm.js
new file mode 100644
index 0000000000000..ce58d679e4cb2
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/content/rm.js
@@ -0,0 +1,18 @@
+'use strict'
+
+const fs = require('fs/promises')
+const contentPath = require('./path')
+const { hasContent } = require('./read')
+
+module.exports = rm
+
+async function rm (cache, integrity) {
+  const content = await hasContent(cache, integrity)
+  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
+  if (content && content.sri) {
+    await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
+    return true
+  } else {
+    return false
+  }
+}
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/write.js b/node_modules/pacote/node_modules/cacache/lib/content/write.js
new file mode 100644
index 0000000000000..7146146581287
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/content/write.js
@@ -0,0 +1,205 @@
+'use strict'
+
+const events = require('events')
+
+const contentPath = require('./path')
+const fs = require('fs/promises')
+const { moveFile } = require('@npmcli/fs')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+const Flush = require('minipass-flush')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+const fsm = require('fs-minipass')
+
+module.exports = write
+
+// Cache of move operations in process so we don't duplicate
+const moveOperations = new Map()
+
+async function write (cache, data, opts = {}) {
+  const { algorithms, size, integrity } = opts
+
+  if (typeof size === 'number' && data.length !== size) {
+    throw sizeError(size, data.length)
+  }
+
+  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
+  if (integrity && !ssri.checkData(data, integrity, opts)) {
+    throw checksumError(integrity, sri)
+  }
+
+  for (const algo in sri) {
+    const tmp = await makeTmp(cache, opts)
+    const hash = sri[algo].toString()
+    try {
+      await fs.writeFile(tmp.target, data, { flag: 'wx' })
+      await moveToDestination(tmp, cache, hash, opts)
+    } finally {
+      if (!tmp.moved) {
+        await fs.rm(tmp.target, { recursive: true, force: true })
+      }
+    }
+  }
+  return { integrity: sri, size: data.length }
+}
+
+module.exports.stream = writeStream
+
+// writes proxied to the 'inputStream' that is passed to the Promise
+// 'end' is deferred until content is handled.
+class CacacheWriteStream extends Flush {
+  constructor (cache, opts) {
+    super()
+    this.opts = opts
+    this.cache = cache
+    this.inputStream = new Minipass()
+    this.inputStream.on('error', er => this.emit('error', er))
+    this.inputStream.on('drain', () => this.emit('drain'))
+    this.handleContentP = null
+  }
+
+  write (chunk, encoding, cb) {
+    if (!this.handleContentP) {
+      this.handleContentP = handleContent(
+        this.inputStream,
+        this.cache,
+        this.opts
+      )
+    }
+    return this.inputStream.write(chunk, encoding, cb)
+  }
+
+  flush (cb) {
+    this.inputStream.end(() => {
+      if (!this.handleContentP) {
+        const e = new Error('Cache input stream was empty')
+        e.code = 'ENODATA'
+        // empty streams are probably emitting end right away.
+        // defer this one tick by rejecting a promise on it.
+        return Promise.reject(e).catch(cb)
+      }
+      // eslint-disable-next-line promise/catch-or-return
+      this.handleContentP.then(
+        (res) => {
+          res.integrity && this.emit('integrity', res.integrity)
+          // eslint-disable-next-line promise/always-return
+          res.size !== null && this.emit('size', res.size)
+          cb()
+        },
+        (er) => cb(er)
+      )
+    })
+  }
+}
+
+function writeStream (cache, opts = {}) {
+  return new CacacheWriteStream(cache, opts)
+}
+
+async function handleContent (inputStream, cache, opts) {
+  const tmp = await makeTmp(cache, opts)
+  try {
+    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
+    await moveToDestination(
+      tmp,
+      cache,
+      res.integrity,
+      opts
+    )
+    return res
+  } finally {
+    if (!tmp.moved) {
+      await fs.rm(tmp.target, { recursive: true, force: true })
+    }
+  }
+}
+
+async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
+  const outStream = new fsm.WriteStream(tmpTarget, {
+    flags: 'wx',
+  })
+
+  if (opts.integrityEmitter) {
+    // we need to create these all simultaneously since they can fire in any order
+    const [integrity, size] = await Promise.all([
+      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
+      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
+      new Pipeline(inputStream, outStream).promise(),
+    ])
+    return { integrity, size }
+  }
+
+  let integrity
+  let size
+  const hashStream = ssri.integrityStream({
+    integrity: opts.integrity,
+    algorithms: opts.algorithms,
+    size: opts.size,
+  })
+  hashStream.on('integrity', i => {
+    integrity = i
+  })
+  hashStream.on('size', s => {
+    size = s
+  })
+
+  const pipeline = new Pipeline(inputStream, hashStream, outStream)
+  await pipeline.promise()
+  return { integrity, size }
+}
+
+async function makeTmp (cache, opts) {
+  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+  await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
+  return {
+    target: tmpTarget,
+    moved: false,
+  }
+}
+
+async function moveToDestination (tmp, cache, sri, opts) {
+  const destination = contentPath(cache, sri)
+  const destDir = path.dirname(destination)
+  if (moveOperations.has(destination)) {
+    return moveOperations.get(destination)
+  }
+  moveOperations.set(
+    destination,
+    fs.mkdir(destDir, { recursive: true })
+      .then(async () => {
+        await moveFile(tmp.target, destination, { overwrite: false })
+        tmp.moved = true
+        return tmp.moved
+      })
+      .catch(err => {
+        if (!err.message.startsWith('The destination file exists')) {
+          throw Object.assign(err, { code: 'EEXIST' })
+        }
+      }).finally(() => {
+        moveOperations.delete(destination)
+      })
+
+  )
+  return moveOperations.get(destination)
+}
+
+function sizeError (expected, found) {
+  /* eslint-disable-next-line max-len */
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function checksumError (expected, found) {
+  const err = new Error(`Integrity check failed:
+  Wanted: ${expected}
+   Found: ${found}`)
+  err.code = 'EINTEGRITY'
+  err.expected = expected
+  err.found = found
+  return err
+}
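
The tmp-then-move dance above (stage under the cache's tmp dir, move into place once the digest is known, dedupe concurrent moves via moveOperations) is driven by cacache's public put; a sketch with hypothetical values:

const cacache = require('cacache')

async function demo () {
  const integrity = await cacache.put('/tmp/my-cache', 'my-key', Buffer.from('hello'))
  console.log(integrity) // e.g. 'sha512-...' once the content is in place
}
demo().catch(console.error)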
diff --git a/node_modules/pacote/node_modules/cacache/lib/entry-index.js b/node_modules/pacote/node_modules/cacache/lib/entry-index.js
new file mode 100644
index 0000000000000..722a37af5ce15
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/entry-index.js
@@ -0,0 +1,330 @@
+'use strict'
+
+const crypto = require('crypto')
+const {
+  appendFile,
+  mkdir,
+  readFile,
+  readdir,
+  rm,
+  writeFile,
+} = require('fs/promises')
+const { Minipass } = require('minipass')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+
+const contentPath = require('./content/path')
+const hashToSegments = require('./util/hash-to-segments')
+const indexV = require('../package.json')['cache-version'].index
+const { moveFile } = require('@npmcli/fs')
+
+module.exports.NotFoundError = class NotFoundError extends Error {
+  constructor (cache, key) {
+    super(`No cache entry for ${key} found in ${cache}`)
+    this.code = 'ENOENT'
+    this.cache = cache
+    this.key = key
+  }
+}
+
+module.exports.compact = compact
+
+async function compact (cache, key, matchFn, opts = {}) {
+  const bucket = bucketPath(cache, key)
+  const entries = await bucketEntries(bucket)
+  const newEntries = []
+  // we loop backwards because the bottom-most result is the newest
+  // since we add new entries with appendFile
+  for (let i = entries.length - 1; i >= 0; --i) {
+    const entry = entries[i]
+    // a null integrity could mean either a delete was appended
+    // or the user has simply stored an index that does not map
+    // to any content. we determine if the user wants to keep the
+    // null integrity based on the validateEntry function passed in options.
+    // if the integrity is null and no validateEntry is provided, we break
+    // as we consider the null integrity to be a deletion of everything
+    // that came before it.
+    if (entry.integrity === null && !opts.validateEntry) {
+      break
+    }
+
+    // if this entry is valid, and it is either the first entry or
+    // the newEntries array doesn't already include an entry that
+    // matches this one based on the provided matchFn, then we add
+    // it to the beginning of our list
+    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
+      (newEntries.length === 0 ||
+        !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
+      newEntries.unshift(entry)
+    }
+  }
+
+  const newIndex = '\n' + newEntries.map((entry) => {
+    const stringified = JSON.stringify(entry)
+    const hash = hashEntry(stringified)
+    return `${hash}\t${stringified}`
+  }).join('\n')
+
+  const setup = async () => {
+    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+    await mkdir(path.dirname(target), { recursive: true })
+    return {
+      target,
+      moved: false,
+    }
+  }
+
+  const teardown = async (tmp) => {
+    if (!tmp.moved) {
+      return rm(tmp.target, { recursive: true, force: true })
+    }
+  }
+
+  const write = async (tmp) => {
+    await writeFile(tmp.target, newIndex, { flag: 'wx' })
+    await mkdir(path.dirname(bucket), { recursive: true })
+    // we use @npmcli/move-file directly here because we
+    // want to overwrite the existing file
+    await moveFile(tmp.target, bucket)
+    tmp.moved = true
+  }
+
+  // write the file atomically
+  const tmp = await setup()
+  try {
+    await write(tmp)
+  } finally {
+    await teardown(tmp)
+  }
+
+  // we reverse the list we generated such that the newest
+  // entries come first in order to make looping through them easier
+  // the true passed to formatEntry tells it to keep null
+  // integrity values, if they made it this far it's because
+  // validateEntry returned true, and as such we should return it
+  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
+}
+
+module.exports.insert = insert
+
+async function insert (cache, key, integrity, opts = {}) {
+  const { metadata, size, time } = opts
+  const bucket = bucketPath(cache, key)
+  const entry = {
+    key,
+    integrity: integrity && ssri.stringify(integrity),
+    time: time || Date.now(),
+    size,
+    metadata,
+  }
+  try {
+    await mkdir(path.dirname(bucket), { recursive: true })
+    const stringified = JSON.stringify(entry)
+    // NOTE - Cleverness ahoy!
+    //
+    // This works because it's tremendously unlikely for an entry to corrupt
+    // another while still preserving the string length of the JSON in
+    // question. So, we just slap the length in there and verify it on read.
+    //
+    // Thanks to @isaacs for the whiteboarding session that ended up with
+    // this.
+    await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return undefined
+    }
+
+    throw err
+  }
+  return formatEntry(cache, entry)
+}
+
+module.exports.find = find
+
+async function find (cache, key) {
+  const bucket = bucketPath(cache, key)
+  try {
+    const entries = await bucketEntries(bucket)
+    return entries.reduce((latest, next) => {
+      if (next && next.key === key) {
+        return formatEntry(cache, next)
+      } else {
+        return latest
+      }
+    }, null)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return null
+    } else {
+      throw err
+    }
+  }
+}
+
+module.exports.delete = del
+
+function del (cache, key, opts = {}) {
+  if (!opts.removeFully) {
+    return insert(cache, key, null, opts)
+  }
+
+  const bucket = bucketPath(cache, key)
+  return rm(bucket, { recursive: true, force: true })
+}
+
+module.exports.lsStream = lsStream
+
+function lsStream (cache) {
+  const indexDir = bucketDir(cache)
+  const stream = new Minipass({ objectMode: true })
+
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const buckets = await readdirOrEmpty(indexDir)
+    await Promise.all(buckets.map(async (bucket) => {
+      const bucketPath = path.join(indexDir, bucket)
+      const subbuckets = await readdirOrEmpty(bucketPath)
+      await Promise.all(subbuckets.map(async (subbucket) => {
+        const subbucketPath = path.join(bucketPath, subbucket)
+
+        // "/cachename//./*"
+        const subbucketEntries = await readdirOrEmpty(subbucketPath)
+        await Promise.all(subbucketEntries.map(async (entry) => {
+          const entryPath = path.join(subbucketPath, entry)
+          try {
+            const entries = await bucketEntries(entryPath)
+            // using a Map here prevents duplicate keys from showing up
+            // twice, I guess?
+            const reduced = entries.reduce((acc, entry) => {
+              acc.set(entry.key, entry)
+              return acc
+            }, new Map())
+            // reduced is a map of key => entry
+            for (const entry of reduced.values()) {
+              const formatted = formatEntry(cache, entry)
+              if (formatted) {
+                stream.write(formatted)
+              }
+            }
+          } catch (err) {
+            if (err.code === 'ENOENT') {
+              return undefined
+            }
+            throw err
+          }
+        }))
+      }))
+    }))
+    stream.end()
+    return stream
+  }).catch(err => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.ls = ls
+
+async function ls (cache) {
+  const entries = await lsStream(cache).collect()
+  return entries.reduce((acc, xs) => {
+    acc[xs.key] = xs
+    return acc
+  }, {})
+}
+
+module.exports.bucketEntries = bucketEntries
+
+async function bucketEntries (bucket, filter) {
+  const data = await readFile(bucket, 'utf8')
+  return _bucketEntries(data, filter)
+}
+
+function _bucketEntries (data, filter) {
+  const entries = []
+  data.split('\n').forEach((entry) => {
+    if (!entry) {
+      return
+    }
+
+    const pieces = entry.split('\t')
+    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
+      // Hash is no good! Corruption or malice? Doesn't matter!
+      // EJECT EJECT
+      return
+    }
+    let obj
+    try {
+      obj = JSON.parse(pieces[1])
+    } catch (_) {
+      // eslint-ignore-next-line no-empty-block
+    }
+    // coverage disabled here, no need to test with an entry that parses to something falsey
+    // istanbul ignore else
+    if (obj) {
+      entries.push(obj)
+    }
+  })
+  return entries
+}
+
+module.exports.bucketDir = bucketDir
+
+function bucketDir (cache) {
+  return path.join(cache, `index-v${indexV}`)
+}
+
+module.exports.bucketPath = bucketPath
+
+function bucketPath (cache, key) {
+  const hashed = hashKey(key)
+  return path.join.apply(
+    path,
+    [bucketDir(cache)].concat(hashToSegments(hashed))
+  )
+}
+
+module.exports.hashKey = hashKey
+
+function hashKey (key) {
+  return hash(key, 'sha256')
+}
+
+module.exports.hashEntry = hashEntry
+
+function hashEntry (str) {
+  return hash(str, 'sha1')
+}
+
+function hash (str, digest) {
+  return crypto
+    .createHash(digest)
+    .update(str)
+    .digest('hex')
+}
+
+function formatEntry (cache, entry, keepAll) {
+  // Treat null digests as deletions. They'll shadow any previous entries.
+  if (!entry.integrity && !keepAll) {
+    return null
+  }
+
+  return {
+    key: entry.key,
+    integrity: entry.integrity,
+    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
+    size: entry.size,
+    time: entry.time,
+    metadata: entry.metadata,
+  }
+}
+
+function readdirOrEmpty (dir) {
+  return readdir(dir).catch((err) => {
+    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
+      return []
+    }
+
+    throw err
+  })
+}
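
The append-plus-verify scheme described in the "Cleverness ahoy!" comment boils down to a single line format; a sketch of what insert() appends and bucketEntries() later checks:

const crypto = require('crypto')

// hypothetical entry; real ones come from index.insert()
const entry = { key: 'my-key', integrity: 'sha512-...', time: Date.now(), size: 5 }
const stringified = JSON.stringify(entry)
const hash = crypto.createHash('sha1').update(stringified).digest('hex')
console.log(`\n${hash}\t${stringified}`)
// on read, any line whose sha1 prefix no longer matches its JSON half is
// silently dropped, so a torn append corrupts only itself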
diff --git a/node_modules/pacote/node_modules/cacache/lib/get.js b/node_modules/pacote/node_modules/cacache/lib/get.js
new file mode 100644
index 0000000000000..80ec206c7ecaa
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/get.js
@@ -0,0 +1,170 @@
+'use strict'
+
+const Collect = require('minipass-collect')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const read = require('./content/read')
+
+async function getData (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return {
+      metadata: memoized.entry.metadata,
+      data: memoized.data,
+      integrity: memoized.entry.integrity,
+      size: memoized.entry.size,
+    }
+  }
+
+  const entry = await index.find(cache, key, opts)
+  if (!entry) {
+    throw new index.NotFoundError(cache, key)
+  }
+  const data = await read(cache, entry.integrity, { integrity, size })
+  if (memoize) {
+    memo.put(cache, entry, data, opts)
+  }
+
+  return {
+    data,
+    metadata: entry.metadata,
+    size: entry.size,
+    integrity: entry.integrity,
+  }
+}
+module.exports = getData
+
+async function getDataByDigest (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get.byDigest(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return memoized
+  }
+
+  const res = await read(cache, key, { integrity, size })
+  if (memoize) {
+    memo.put.byDigest(cache, key, res, opts)
+  }
+  return res
+}
+module.exports.byDigest = getDataByDigest
+
+const getMemoizedStream = (memoized) => {
+  const stream = new Minipass()
+  stream.on('newListener', function (ev, cb) {
+    ev === 'metadata' && cb(memoized.entry.metadata)
+    ev === 'integrity' && cb(memoized.entry.integrity)
+    ev === 'size' && cb(memoized.entry.size)
+  })
+  stream.end(memoized.data)
+  return stream
+}
+
+function getStream (cache, key, opts = {}) {
+  const { memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return getMemoizedStream(memoized)
+  }
+
+  const stream = new Pipeline()
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const entry = await index.find(cache, key)
+    if (!entry) {
+      throw new index.NotFoundError(cache, key)
+    }
+
+    stream.emit('metadata', entry.metadata)
+    stream.emit('integrity', entry.integrity)
+    stream.emit('size', entry.size)
+    stream.on('newListener', function (ev, cb) {
+      ev === 'metadata' && cb(entry.metadata)
+      ev === 'integrity' && cb(entry.integrity)
+      ev === 'size' && cb(entry.size)
+    })
+
+    const src = read.readStream(
+      cache,
+      entry.integrity,
+      { ...opts, size: typeof size !== 'number' ? entry.size : size }
+    )
+
+    if (memoize) {
+      const memoStream = new Collect.PassThrough()
+      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
+      stream.unshift(memoStream)
+    }
+    stream.unshift(src)
+    return stream
+  }).catch((err) => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.stream = getStream
+
+function getStreamDigest (cache, integrity, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get.byDigest(cache, integrity, opts)
+  if (memoized && memoize !== false) {
+    const stream = new Minipass()
+    stream.end(memoized)
+    return stream
+  } else {
+    const stream = read.readStream(cache, integrity, opts)
+    if (!memoize) {
+      return stream
+    }
+
+    const memoStream = new Collect.PassThrough()
+    memoStream.on('collect', data => memo.put.byDigest(
+      cache,
+      integrity,
+      data,
+      opts
+    ))
+    return new Pipeline(stream, memoStream)
+  }
+}
+
+module.exports.stream.byDigest = getStreamDigest
+
+function info (cache, key, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return Promise.resolve(memoized.entry)
+  } else {
+    return index.find(cache, key)
+  }
+}
+module.exports.info = info
+
+async function copy (cache, key, dest, opts = {}) {
+  const entry = await index.find(cache, key, opts)
+  if (!entry) {
+    throw new index.NotFoundError(cache, key)
+  }
+  await read.copy(cache, entry.integrity, dest, opts)
+  return {
+    metadata: entry.metadata,
+    size: entry.size,
+    integrity: entry.integrity,
+  }
+}
+
+module.exports.copy = copy
+
+async function copyByDigest (cache, key, dest, opts = {}) {
+  await read.copy(cache, key, dest, opts)
+  return key
+}
+
+module.exports.copy.byDigest = copyByDigest
+
+module.exports.hasContent = read.hasContent
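
The 'newListener' replay above means even late subscribers receive metadata/integrity/size; a streaming usage sketch (paths hypothetical):

const cacache = require('cacache')
const fs = require('fs')

cacache.get.stream('/tmp/my-cache', 'my-key')
  .on('metadata', (metadata) => console.log('metadata:', metadata))
  .on('integrity', (integrity) => console.log('integrity:', integrity))
  .on('error', console.error)
  .pipe(fs.createWriteStream('/tmp/out'))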
diff --git a/node_modules/pacote/node_modules/cacache/lib/index.js b/node_modules/pacote/node_modules/cacache/lib/index.js
new file mode 100644
index 0000000000000..c9b0da5f3a271
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/index.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const get = require('./get.js')
+const put = require('./put.js')
+const rm = require('./rm.js')
+const verify = require('./verify.js')
+const { clearMemoized } = require('./memoization.js')
+const tmp = require('./util/tmp.js')
+const index = require('./entry-index.js')
+
+module.exports.index = {}
+module.exports.index.compact = index.compact
+module.exports.index.insert = index.insert
+
+module.exports.ls = index.ls
+module.exports.ls.stream = index.lsStream
+
+module.exports.get = get
+module.exports.get.byDigest = get.byDigest
+module.exports.get.stream = get.stream
+module.exports.get.stream.byDigest = get.stream.byDigest
+module.exports.get.copy = get.copy
+module.exports.get.copy.byDigest = get.copy.byDigest
+module.exports.get.info = get.info
+module.exports.get.hasContent = get.hasContent
+
+module.exports.put = put
+module.exports.put.stream = put.stream
+
+module.exports.rm = rm.entry
+module.exports.rm.all = rm.all
+module.exports.rm.entry = module.exports.rm
+module.exports.rm.content = rm.content
+
+module.exports.clearMemoized = clearMemoized
+
+module.exports.tmp = {}
+module.exports.tmp.mkdir = tmp.mkdir
+module.exports.tmp.withTmp = tmp.withTmp
+
+module.exports.verify = verify
+module.exports.verify.lastRun = verify.lastRun
diff --git a/node_modules/pacote/node_modules/cacache/lib/memoization.js b/node_modules/pacote/node_modules/cacache/lib/memoization.js
new file mode 100644
index 0000000000000..0ff604a479c9c
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/memoization.js
@@ -0,0 +1,72 @@
+'use strict'
+
+const LRU = require('lru-cache')
+
+const MEMOIZED = new LRU({
+  max: 500,
+  maxSize: 50 * 1024 * 1024, // 50MB
+  ttl: 3 * 60 * 1000, // 3 minutes
+  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
+})
+
+module.exports.clearMemoized = clearMemoized
+
+function clearMemoized () {
+  const old = {}
+  MEMOIZED.forEach((v, k) => {
+    old[k] = v
+  })
+  MEMOIZED.clear()
+  return old
+}
+
+module.exports.put = put
+
+function put (cache, entry, data, opts) {
+  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
+  putDigest(cache, entry.integrity, data, opts)
+}
+
+module.exports.put.byDigest = putDigest
+
+function putDigest (cache, integrity, data, opts) {
+  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
+}
+
+module.exports.get = get
+
+function get (cache, key, opts) {
+  return pickMem(opts).get(`key:${cache}:${key}`)
+}
+
+module.exports.get.byDigest = getDigest
+
+function getDigest (cache, integrity, opts) {
+  return pickMem(opts).get(`digest:${cache}:${integrity}`)
+}
+
+class ObjProxy {
+  constructor (obj) {
+    this.obj = obj
+  }
+
+  get (key) {
+    return this.obj[key]
+  }
+
+  set (key, val) {
+    this.obj[key] = val
+  }
+}
+
+function pickMem (opts) {
+  if (!opts || !opts.memoize) {
+    return MEMOIZED
+  } else if (opts.memoize.get && opts.memoize.set) {
+    return opts.memoize
+  } else if (typeof opts.memoize === 'object') {
+    return new ObjProxy(opts.memoize)
+  } else {
+    return MEMOIZED
+  }
+}
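
Because pickMem() accepts anything with get/set methods, callers can substitute their own store for the shared LRU; a sketch (cache path and key hypothetical):

const cacache = require('cacache')

const memo = new Map() // has get/set, so pickMem() uses it directly

async function demo () {
  await cacache.get('/tmp/my-cache', 'my-key', { memoize: memo })
  // on success, memo holds 'key:<cache>:<key>' and 'digest:<cache>:<integrity>'
  console.log([...memo.keys()])
}
demo().catch(console.error)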
diff --git a/node_modules/pacote/node_modules/cacache/lib/put.js b/node_modules/pacote/node_modules/cacache/lib/put.js
new file mode 100644
index 0000000000000..9fc932d5f6dec
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/put.js
@@ -0,0 +1,80 @@
+'use strict'
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const write = require('./content/write')
+const Flush = require('minipass-flush')
+const { PassThrough } = require('minipass-collect')
+const Pipeline = require('minipass-pipeline')
+
+const putOpts = (opts) => ({
+  algorithms: ['sha512'],
+  ...opts,
+})
+
+module.exports = putData
+
+async function putData (cache, key, data, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  const res = await write(cache, data, opts)
+  const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
+  if (memoize) {
+    memo.put(cache, entry, data, opts)
+  }
+
+  return res.integrity
+}
+
+module.exports.stream = putStream
+
+function putStream (cache, key, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  let integrity
+  let size
+  let error
+
+  let memoData
+  const pipeline = new Pipeline()
+  // first item in the pipeline is the memoizer, because we need
+  // that to end first and get the collected data.
+  if (memoize) {
+    const memoizer = new PassThrough().on('collect', data => {
+      memoData = data
+    })
+    pipeline.push(memoizer)
+  }
+
+  // contentStream is a write-only, not a passthrough
+  // no data comes out of it.
+  const contentStream = write.stream(cache, opts)
+    .on('integrity', (int) => {
+      integrity = int
+    })
+    .on('size', (s) => {
+      size = s
+    })
+    .on('error', (err) => {
+      error = err
+    })
+
+  pipeline.push(contentStream)
+
+  // last but not least, we write the index and emit hash and size,
+  // and memoize if we're doing that
+  pipeline.push(new Flush({
+    async flush () {
+      if (!error) {
+        const entry = await index.insert(cache, key, integrity, { ...opts, size })
+        if (memoize && memoData) {
+          memo.put(cache, entry, memoData, opts)
+        }
+        pipeline.emit('integrity', integrity)
+        pipeline.emit('size', size)
+      }
+    },
+  }))
+
+  return pipeline
+}
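
And the streaming counterpart: the Flush stage at the end of the pipeline is what emits integrity/size after the index entry is written. A sketch with hypothetical paths:

const cacache = require('cacache')
const fs = require('fs')

fs.createReadStream('/tmp/some-file')
  .pipe(cacache.put.stream('/tmp/my-cache', 'my-key'))
  .on('integrity', (integrity) => console.log('stored as', integrity))
  .on('error', console.error)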
diff --git a/node_modules/pacote/node_modules/cacache/lib/rm.js b/node_modules/pacote/node_modules/cacache/lib/rm.js
new file mode 100644
index 0000000000000..a94760c7cf243
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/rm.js
@@ -0,0 +1,31 @@
+'use strict'
+
+const { rm } = require('fs/promises')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const memo = require('./memoization')
+const path = require('path')
+const rmContent = require('./content/rm')
+
+module.exports = entry
+module.exports.entry = entry
+
+function entry (cache, key, opts) {
+  memo.clearMemoized()
+  return index.delete(cache, key, opts)
+}
+
+module.exports.content = content
+
+function content (cache, integrity) {
+  memo.clearMemoized()
+  return rmContent(cache, integrity)
+}
+
+module.exports.all = all
+
+async function all (cache) {
+  memo.clearMemoized()
+  const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
+  return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
+}
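
The three removal granularities wired up above, from the public API (values hypothetical):

const cacache = require('cacache')

async function demo () {
  await cacache.rm.entry('/tmp/my-cache', 'my-key') // appends a null-integrity tombstone
  await cacache.rm.content('/tmp/my-cache', 'sha512-...') // deletes one content blob
  await cacache.rm.all('/tmp/my-cache') // sweeps content-*/index-* wholesale
}
demo().catch(console.error)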
diff --git a/node_modules/pacote/node_modules/cacache/lib/util/glob.js b/node_modules/pacote/node_modules/cacache/lib/util/glob.js
new file mode 100644
index 0000000000000..8500c1c16a429
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/util/glob.js
@@ -0,0 +1,7 @@
+'use strict'
+
+const { glob } = require('glob')
+const path = require('path')
+
+const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
+module.exports = (path, options) => glob(globify(path), options)
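
globify() exists because glob v10 treats backslashes as escape characters, so Windows-style paths must be converted before matching; a sketch:

const path = require('path')
const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
console.log(globify('C:\\cache\\content-v2\\**'))
// => 'C:/cache/content-v2/**'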
diff --git a/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js
new file mode 100644
index 0000000000000..445599b503808
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js
@@ -0,0 +1,7 @@
+'use strict'
+
+module.exports = hashToSegments
+
+function hashToSegments (hash) {
+  return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
+}
diff --git a/node_modules/pacote/node_modules/cacache/lib/util/tmp.js b/node_modules/pacote/node_modules/cacache/lib/util/tmp.js
new file mode 100644
index 0000000000000..0bf5302136ebe
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/util/tmp.js
@@ -0,0 +1,26 @@
+'use strict'
+
+const { withTempDir } = require('@npmcli/fs')
+const fs = require('fs/promises')
+const path = require('path')
+
+module.exports.mkdir = mktmpdir
+
+async function mktmpdir (cache, opts = {}) {
+  const { tmpPrefix } = opts
+  const tmpDir = path.join(cache, 'tmp')
+  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
+  // do not use path.join(), it drops the trailing / if tmpPrefix is unset
+  const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
+  return fs.mkdtemp(target, { owner: 'inherit' })
+}
+
+module.exports.withTmp = withTmp
+
+function withTmp (cache, opts, cb) {
+  if (!cb) {
+    cb = opts
+    opts = {}
+  }
+  return withTempDir(path.join(cache, 'tmp'), cb, opts)
+}
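
withTmp() hands its callback a disposable directory under the cache's tmp dir and removes it when the callback's promise settles; a usage sketch (path hypothetical):

const cacache = require('cacache')

cacache.tmp.withTmp('/tmp/my-cache', async (dir) => {
  // stage work in `dir`; it is cleaned up when this promise settles
  console.log('scratch space at', dir)
}).catch(console.error)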
diff --git a/node_modules/pacote/node_modules/cacache/lib/verify.js b/node_modules/pacote/node_modules/cacache/lib/verify.js
new file mode 100644
index 0000000000000..62e85c946490f
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/verify.js
@@ -0,0 +1,257 @@
+'use strict'
+
+const {
+  mkdir,
+  readFile,
+  rm,
+  stat,
+  truncate,
+  writeFile,
+} = require('fs/promises')
+const pMap = require('p-map')
+const contentPath = require('./content/path')
+const fsm = require('fs-minipass')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const path = require('path')
+const ssri = require('ssri')
+
+const hasOwnProperty = (obj, key) =>
+  Object.prototype.hasOwnProperty.call(obj, key)
+
+const verifyOpts = (opts) => ({
+  concurrency: 20,
+  log: { silly () {} },
+  ...opts,
+})
+
+module.exports = verify
+
+async function verify (cache, opts) {
+  opts = verifyOpts(opts)
+  opts.log.silly('verify', 'verifying cache at', cache)
+
+  const steps = [
+    markStartTime,
+    fixPerms,
+    garbageCollect,
+    rebuildIndex,
+    cleanTmp,
+    writeVerifile,
+    markEndTime,
+  ]
+
+  const stats = {}
+  for (const step of steps) {
+    const label = step.name
+    const start = new Date()
+    const s = await step(cache, opts)
+    if (s) {
+      Object.keys(s).forEach((k) => {
+        stats[k] = s[k]
+      })
+    }
+    const end = new Date()
+    if (!stats.runTime) {
+      stats.runTime = {}
+    }
+    stats.runTime[label] = end - start
+  }
+  stats.runTime.total = stats.endTime - stats.startTime
+  opts.log.silly(
+    'verify',
+    'verification finished for',
+    cache,
+    'in',
+    `${stats.runTime.total}ms`
+  )
+  return stats
+}
+
+async function markStartTime (cache, opts) {
+  return { startTime: new Date() }
+}
+
+async function markEndTime (cache, opts) {
+  return { endTime: new Date() }
+}
+
+async function fixPerms (cache, opts) {
+  opts.log.silly('verify', 'fixing cache permissions')
+  await mkdir(cache, { recursive: true })
+  return null
+}
+
+// Implements a naive mark-and-sweep tracing garbage collector.
+//
+// The algorithm is basically as follows:
+// 1. Read (and filter) all index entries ("pointers")
+// 2. Mark each integrity value as "live"
+// 3. Read entire filesystem tree in `content-vX/` dir
+// 4. If content is live, verify its checksum and delete it if it fails
+// 5. If content is not marked as live, rm it.
+//
+async function garbageCollect (cache, opts) {
+  opts.log.silly('verify', 'garbage collecting content')
+  const indexStream = index.lsStream(cache)
+  const liveContent = new Set()
+  indexStream.on('data', (entry) => {
+    if (opts.filter && !opts.filter(entry)) {
+      return
+    }
+
+    // integrity is stringified, re-parse it so we can get each hash
+    const integrity = ssri.parse(entry.integrity)
+    for (const algo in integrity) {
+      liveContent.add(integrity[algo].toString())
+    }
+  })
+  await new Promise((resolve, reject) => {
+    indexStream.on('end', resolve).on('error', reject)
+  })
+  const contentDir = contentPath.contentDir(cache)
+  const files = await glob(path.join(contentDir, '**'), {
+    follow: false,
+    nodir: true,
+    nosort: true,
+  })
+  const stats = {
+    verifiedContent: 0,
+    reclaimedCount: 0,
+    reclaimedSize: 0,
+    badContentCount: 0,
+    keptSize: 0,
+  }
+  await pMap(
+    files,
+    async (f) => {
+      const split = f.split(/[/\\]/)
+      const digest = split.slice(split.length - 3).join('')
+      const algo = split[split.length - 4]
+      const integrity = ssri.fromHex(digest, algo)
+      if (liveContent.has(integrity.toString())) {
+        const info = await verifyContent(f, integrity)
+        if (!info.valid) {
+          stats.reclaimedCount++
+          stats.badContentCount++
+          stats.reclaimedSize += info.size
+        } else {
+          stats.verifiedContent++
+          stats.keptSize += info.size
+        }
+      } else {
+        // No entries refer to this content. We can delete.
+        stats.reclaimedCount++
+        const s = await stat(f)
+        await rm(f, { recursive: true, force: true })
+        stats.reclaimedSize += s.size
+      }
+      return stats
+    },
+    { concurrency: opts.concurrency }
+  )
+  return stats
+}
+
+async function verifyContent (filepath, sri) {
+  const contentInfo = {}
+  try {
+    const { size } = await stat(filepath)
+    contentInfo.size = size
+    contentInfo.valid = true
+    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return { size: 0, valid: false }
+    }
+    if (err.code !== 'EINTEGRITY') {
+      throw err
+    }
+
+    await rm(filepath, { recursive: true, force: true })
+    contentInfo.valid = false
+  }
+  return contentInfo
+}
+
+async function rebuildIndex (cache, opts) {
+  opts.log.silly('verify', 'rebuilding index')
+  const entries = await index.ls(cache)
+  const stats = {
+    missingContent: 0,
+    rejectedEntries: 0,
+    totalEntries: 0,
+  }
+  const buckets = {}
+  for (const k in entries) {
+    /* istanbul ignore else */
+    if (hasOwnProperty(entries, k)) {
+      const hashed = index.hashKey(k)
+      const entry = entries[k]
+      const excluded = opts.filter && !opts.filter(entry)
+      excluded && stats.rejectedEntries++
+      if (buckets[hashed] && !excluded) {
+        buckets[hashed].push(entry)
+      } else if (buckets[hashed] && excluded) {
+        // skip
+      } else if (excluded) {
+        buckets[hashed] = []
+        buckets[hashed]._path = index.bucketPath(cache, k)
+      } else {
+        buckets[hashed] = [entry]
+        buckets[hashed]._path = index.bucketPath(cache, k)
+      }
+    }
+  }
+  await pMap(
+    Object.keys(buckets),
+    (key) => {
+      return rebuildBucket(cache, buckets[key], stats, opts)
+    },
+    { concurrency: opts.concurrency }
+  )
+  return stats
+}
+
+async function rebuildBucket (cache, bucket, stats, opts) {
+  await truncate(bucket._path)
+  // This needs to be serialized because cacache explicitly
+  // lets very racy bucket conflicts clobber each other.
+  for (const entry of bucket) {
+    const content = contentPath(cache, entry.integrity)
+    try {
+      await stat(content)
+      await index.insert(cache, entry.key, entry.integrity, {
+        metadata: entry.metadata,
+        size: entry.size,
+        time: entry.time,
+      })
+      stats.totalEntries++
+    } catch (err) {
+      if (err.code === 'ENOENT') {
+        stats.rejectedEntries++
+        stats.missingContent++
+      } else {
+        throw err
+      }
+    }
+  }
+}
+
+function cleanTmp (cache, opts) {
+  opts.log.silly('verify', 'cleaning tmp directory')
+  return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
+}
+
+async function writeVerifile (cache, opts) {
+  const verifile = path.join(cache, '_lastverified')
+  opts.log.silly('verify', 'writing verifile to ' + verifile)
+  return writeFile(verifile, `${Date.now()}`)
+}
+
+module.exports.lastRun = lastRun
+
+async function lastRun (cache) {
+  const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
+  return new Date(+data)
+}
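
Running the whole mark-and-sweep pass above through the public API (path hypothetical):

const cacache = require('cacache')

async function demo () {
  const stats = await cacache.verify('/tmp/my-cache')
  // stats merges each step's report: verifiedContent, reclaimedCount,
  // reclaimedSize, totalEntries, plus per-step runTime timings
  console.log(stats)
  console.log('last verified:', await cacache.verify.lastRun('/tmp/my-cache'))
}
demo().catch(console.error)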
diff --git a/node_modules/pacote/node_modules/cacache/package.json b/node_modules/pacote/node_modules/cacache/package.json
new file mode 100644
index 0000000000000..ab58cb8b7c50f
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "cacache",
+  "version": "17.1.4",
+  "cache-version": {
+    "content": "2",
+    "index": "5"
+  },
+  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "coverage": "tap",
+    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
+    "lint": "eslint \"**/*.js\"",
+    "npmclilint": "npmcli-lint",
+    "lintfix": "npm run lint -- --fix",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "posttest": "npm run lint",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/cacache.git"
+  },
+  "keywords": [
+    "cache",
+    "caching",
+    "content-addressable",
+    "sri",
+    "sri hash",
+    "subresource integrity",
+    "cache",
+    "storage",
+    "store",
+    "file store",
+    "filesystem",
+    "disk cache",
+    "disk storage"
+  ],
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/fs": "^3.1.0",
+    "fs-minipass": "^3.0.0",
+    "glob": "^10.2.2",
+    "lru-cache": "^7.7.1",
+    "minipass": "^7.0.3",
+    "minipass-collect": "^1.0.2",
+    "minipass-flush": "^1.0.5",
+    "minipass-pipeline": "^1.2.4",
+    "p-map": "^4.0.0",
+    "ssri": "^10.0.0",
+    "tar": "^6.1.11",
+    "unique-filename": "^3.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "tap": "^16.0.0"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "windowsCI": false,
+    "version": "4.18.0",
+    "publish": "true"
+  },
+  "author": "GitHub Inc.",
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/node_modules/sigstore/node_modules/cacache/LICENSE.md b/node_modules/sigstore/node_modules/cacache/LICENSE.md
new file mode 100644
index 0000000000000..8d28acf866d93
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/LICENSE.md
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/cacache/lib/content/path.js b/node_modules/sigstore/node_modules/cacache/lib/content/path.js
new file mode 100644
index 0000000000000..ad5a76a4f73f2
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/content/path.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const contentVer = require('../../package.json')['cache-version'].content
+const hashToSegments = require('../util/hash-to-segments')
+const path = require('path')
+const ssri = require('ssri')
+
+// Current format of content file path:
+//
+// sha512-BaSE64Hex= ->
+// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
+//
+module.exports = contentPath
+
+function contentPath (cache, integrity) {
+  const sri = ssri.parse(integrity, { single: true })
+  // contentPath is the *strongest* algo given
+  return path.join(
+    contentDir(cache),
+    sri.algorithm,
+    ...hashToSegments(sri.hexDigest())
+  )
+}
+
+module.exports.contentDir = contentDir
+
+function contentDir (cache) {
+  return path.join(cache, `content-v${contentVer}`)
+}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/content/read.js b/node_modules/sigstore/node_modules/cacache/lib/content/read.js
new file mode 100644
index 0000000000000..f41b539df65dc
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/content/read.js
@@ -0,0 +1,166 @@
+'use strict'
+
+const fs = require('fs/promises')
+const fsm = require('fs-minipass')
+const ssri = require('ssri')
+const contentPath = require('./path')
+const Pipeline = require('minipass-pipeline')
+
+module.exports = read
+
+const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
+async function read (cache, integrity, opts = {}) {
+  const { size } = opts
+  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+    // get size
+    const stat = await fs.stat(cpath)
+    return { stat, cpath, sri }
+  })
+  if (typeof size === 'number' && stat.size !== size) {
+    throw sizeError(size, stat.size)
+  }
+
+  if (stat.size > MAX_SINGLE_READ_SIZE) {
+    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
+  }
+
+  const data = await fs.readFile(cpath, { encoding: null })
+  if (!ssri.checkData(data, sri)) {
+    throw integrityError(sri, cpath)
+  }
+
+  return data
+}
+
+const readPipeline = (cpath, size, sri, stream) => {
+  stream.push(
+    new fsm.ReadStream(cpath, {
+      size,
+      readSize: MAX_SINGLE_READ_SIZE,
+    }),
+    ssri.integrityStream({
+      integrity: sri,
+      size,
+    })
+  )
+  return stream
+}
+
+module.exports.stream = readStream
+module.exports.readStream = readStream
+
+function readStream (cache, integrity, opts = {}) {
+  const { size } = opts
+  const stream = new Pipeline()
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+      // just stat to ensure it exists
+      const stat = await fs.stat(cpath)
+      return { stat, cpath, sri }
+    })
+    if (typeof size === 'number' && size !== stat.size) {
+      return stream.emit('error', sizeError(size, stat.size))
+    }
+
+    return readPipeline(cpath, stat.size, sri, stream)
+  }).catch(err => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.copy = copy
+
+function copy (cache, integrity, dest) {
+  return withContentSri(cache, integrity, (cpath, sri) => {
+    return fs.copyFile(cpath, dest)
+  })
+}
+
+module.exports.hasContent = hasContent
+
+async function hasContent (cache, integrity) {
+  if (!integrity) {
+    return false
+  }
+
+  try {
+    return await withContentSri(cache, integrity, async (cpath, sri) => {
+      const stat = await fs.stat(cpath)
+      return { size: stat.size, sri, stat }
+    })
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return false
+    }
+
+    if (err.code === 'EPERM') {
+      /* istanbul ignore else */
+      if (process.platform !== 'win32') {
+        throw err
+      } else {
+        return false
+      }
+    }
+  }
+}
+
+async function withContentSri (cache, integrity, fn) {
+  const sri = ssri.parse(integrity)
+  // If `integrity` has multiple entries, pick the first digest
+  // with available local data.
+  const algo = sri.pickAlgorithm()
+  const digests = sri[algo]
+
+  if (digests.length <= 1) {
+    const cpath = contentPath(cache, digests[0])
+    return fn(cpath, digests[0])
+  } else {
+    // Can't use race here because a generic error can happen before
+    // an ENOENT error, and can happen before a valid result
+    const results = await Promise.all(digests.map(async (meta) => {
+      try {
+        return await withContentSri(cache, meta, fn)
+      } catch (err) {
+        if (err.code === 'ENOENT') {
+          return Object.assign(
+            new Error('No matching content found for ' + sri.toString()),
+            { code: 'ENOENT' }
+          )
+        }
+        return err
+      }
+    }))
+    // Return the first non-error result if one is found
+    const result = results.find((r) => !(r instanceof Error))
+    if (result) {
+      return result
+    }
+
+    // Throw the No matching content found error
+    const enoentError = results.find((r) => r.code === 'ENOENT')
+    if (enoentError) {
+      throw enoentError
+    }
+
+    // Throw generic error
+    throw results.find((r) => r instanceof Error)
+  }
+}
+
+function sizeError (expected, found) {
+  /* eslint-disable-next-line max-len */
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function integrityError (sri, path) {
+  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
+  err.code = 'EINTEGRITY'
+  err.sri = sri
+  err.path = path
+  return err
+}
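
read() above buffers the whole blob in memory below the 64 MiB threshold and switches to a streaming pipeline beyond it; either way, callers see the EBADSIZE/EINTEGRITY error shapes defined at the bottom of the file. A hedged usage sketch (the cache path is hypothetical, and the relative require assumes this module's location on disk):

    const read = require('./lib/content/read')

    async function safeRead (cache, integrity, size) {
      try {
        return await read(cache, integrity, { size })
      } catch (err) {
        if (err.code === 'EBADSIZE' || err.code === 'EINTEGRITY') {
          // stored blob is the wrong length or no longer matches its digest;
          // treat it as a cache miss
          return null
        }
        throw err
      }
    }
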
diff --git a/node_modules/sigstore/node_modules/cacache/lib/content/rm.js b/node_modules/sigstore/node_modules/cacache/lib/content/rm.js
new file mode 100644
index 0000000000000..ce58d679e4cb2
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/content/rm.js
@@ -0,0 +1,18 @@
+'use strict'
+
+const fs = require('fs/promises')
+const contentPath = require('./path')
+const { hasContent } = require('./read')
+
+module.exports = rm
+
+async function rm (cache, integrity) {
+  const content = await hasContent(cache, integrity)
+  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
+  if (content && content.sri) {
+    await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
+    return true
+  } else {
+    return false
+  }
+}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/content/write.js b/node_modules/sigstore/node_modules/cacache/lib/content/write.js
new file mode 100644
index 0000000000000..7146146581287
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/content/write.js
@@ -0,0 +1,205 @@
+'use strict'
+
+const events = require('events')
+
+const contentPath = require('./path')
+const fs = require('fs/promises')
+const { moveFile } = require('@npmcli/fs')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+const Flush = require('minipass-flush')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+const fsm = require('fs-minipass')
+
+module.exports = write
+
+// Cache of move operations in progress so we don't duplicate them
+const moveOperations = new Map()
+
+async function write (cache, data, opts = {}) {
+  const { algorithms, size, integrity } = opts
+
+  if (typeof size === 'number' && data.length !== size) {
+    throw sizeError(size, data.length)
+  }
+
+  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
+  if (integrity && !ssri.checkData(data, integrity, opts)) {
+    throw checksumError(integrity, sri)
+  }
+
+  for (const algo in sri) {
+    const tmp = await makeTmp(cache, opts)
+    const hash = sri[algo].toString()
+    try {
+      await fs.writeFile(tmp.target, data, { flag: 'wx' })
+      await moveToDestination(tmp, cache, hash, opts)
+    } finally {
+      if (!tmp.moved) {
+        await fs.rm(tmp.target, { recursive: true, force: true })
+      }
+    }
+  }
+  return { integrity: sri, size: data.length }
+}
+
+module.exports.stream = writeStream
+
+// writes are proxied to the 'inputStream' that is passed to the Promise;
+// 'end' is deferred until the content is handled.
+class CacacheWriteStream extends Flush {
+  constructor (cache, opts) {
+    super()
+    this.opts = opts
+    this.cache = cache
+    this.inputStream = new Minipass()
+    this.inputStream.on('error', er => this.emit('error', er))
+    this.inputStream.on('drain', () => this.emit('drain'))
+    this.handleContentP = null
+  }
+
+  write (chunk, encoding, cb) {
+    if (!this.handleContentP) {
+      this.handleContentP = handleContent(
+        this.inputStream,
+        this.cache,
+        this.opts
+      )
+    }
+    return this.inputStream.write(chunk, encoding, cb)
+  }
+
+  flush (cb) {
+    this.inputStream.end(() => {
+      if (!this.handleContentP) {
+        const e = new Error('Cache input stream was empty')
+        e.code = 'ENODATA'
+        // empty streams are probably emitting end right away.
+        // defer this one tick by rejecting a promise on it.
+        return Promise.reject(e).catch(cb)
+      }
+      // eslint-disable-next-line promise/catch-or-return
+      this.handleContentP.then(
+        (res) => {
+          res.integrity && this.emit('integrity', res.integrity)
+          // eslint-disable-next-line promise/always-return
+          res.size !== null && this.emit('size', res.size)
+          cb()
+        },
+        (er) => cb(er)
+      )
+    })
+  }
+}
+
+function writeStream (cache, opts = {}) {
+  return new CacacheWriteStream(cache, opts)
+}
+
+async function handleContent (inputStream, cache, opts) {
+  const tmp = await makeTmp(cache, opts)
+  try {
+    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
+    await moveToDestination(
+      tmp,
+      cache,
+      res.integrity,
+      opts
+    )
+    return res
+  } finally {
+    if (!tmp.moved) {
+      await fs.rm(tmp.target, { recursive: true, force: true })
+    }
+  }
+}
+
+async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
+  const outStream = new fsm.WriteStream(tmpTarget, {
+    flags: 'wx',
+  })
+
+  if (opts.integrityEmitter) {
+    // we need to create these all simultaneously since they can fire in any order
+    const [integrity, size] = await Promise.all([
+      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
+      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
+      new Pipeline(inputStream, outStream).promise(),
+    ])
+    return { integrity, size }
+  }
+
+  let integrity
+  let size
+  const hashStream = ssri.integrityStream({
+    integrity: opts.integrity,
+    algorithms: opts.algorithms,
+    size: opts.size,
+  })
+  hashStream.on('integrity', i => {
+    integrity = i
+  })
+  hashStream.on('size', s => {
+    size = s
+  })
+
+  const pipeline = new Pipeline(inputStream, hashStream, outStream)
+  await pipeline.promise()
+  return { integrity, size }
+}
+
+async function makeTmp (cache, opts) {
+  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+  await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
+  return {
+    target: tmpTarget,
+    moved: false,
+  }
+}
+
+async function moveToDestination (tmp, cache, sri, opts) {
+  const destination = contentPath(cache, sri)
+  const destDir = path.dirname(destination)
+  if (moveOperations.has(destination)) {
+    return moveOperations.get(destination)
+  }
+  moveOperations.set(
+    destination,
+    fs.mkdir(destDir, { recursive: true })
+      .then(async () => {
+        await moveFile(tmp.target, destination, { overwrite: false })
+        tmp.moved = true
+        return tmp.moved
+      })
+      .catch(err => {
+        if (!err.message.startsWith('The destination file exists')) {
+          throw Object.assign(err, { code: 'EEXIST' })
+        }
+      }).finally(() => {
+        moveOperations.delete(destination)
+      })
+
+  )
+  return moveOperations.get(destination)
+}
+
+function sizeError (expected, found) {
+  /* eslint-disable-next-line max-len */
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function checksumError (expected, found) {
+  const err = new Error(`Integrity check failed:
+  Wanted: ${expected}
+   Found: ${found}`)
+  err.code = 'EINTEGRITY'
+  err.expected = expected
+  err.found = found
+  return err
+}
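
Both write() and the write stream funnel through moveToDestination(), which dedupes concurrent moves to the same content address via the moveOperations map, so two writers of identical content share one rename. A minimal sketch of the same dedupe pattern in isolation (all names here are illustrative):

    // one in-flight promise per destination; concurrent callers for the
    // same destination share it instead of racing on the filesystem
    const inFlight = new Map()

    function dedupedMove (destination, doMove) {
      if (inFlight.has(destination)) {
        return inFlight.get(destination)
      }
      const p = doMove(destination).finally(() => inFlight.delete(destination))
      inFlight.set(destination, p)
      return p
    }
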
diff --git a/node_modules/sigstore/node_modules/cacache/lib/entry-index.js b/node_modules/sigstore/node_modules/cacache/lib/entry-index.js
new file mode 100644
index 0000000000000..722a37af5ce15
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/entry-index.js
@@ -0,0 +1,330 @@
+'use strict'
+
+const crypto = require('crypto')
+const {
+  appendFile,
+  mkdir,
+  readFile,
+  readdir,
+  rm,
+  writeFile,
+} = require('fs/promises')
+const { Minipass } = require('minipass')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+
+const contentPath = require('./content/path')
+const hashToSegments = require('./util/hash-to-segments')
+const indexV = require('../package.json')['cache-version'].index
+const { moveFile } = require('@npmcli/fs')
+
+module.exports.NotFoundError = class NotFoundError extends Error {
+  constructor (cache, key) {
+    super(`No cache entry for ${key} found in ${cache}`)
+    this.code = 'ENOENT'
+    this.cache = cache
+    this.key = key
+  }
+}
+
+module.exports.compact = compact
+
+async function compact (cache, key, matchFn, opts = {}) {
+  const bucket = bucketPath(cache, key)
+  const entries = await bucketEntries(bucket)
+  const newEntries = []
+  // we loop backwards because the bottom-most result is the newest
+  // since we add new entries with appendFile
+  for (let i = entries.length - 1; i >= 0; --i) {
+    const entry = entries[i]
+    // a null integrity could mean either a delete was appended
+    // or the user has simply stored an index that does not map
+    // to any content. we determine if the user wants to keep the
+    // null integrity based on the validateEntry function passed in options.
+    // if the integrity is null and no validateEntry is provided, we break
+    // as we consider the null integrity to be a deletion of everything
+    // that came before it.
+    if (entry.integrity === null && !opts.validateEntry) {
+      break
+    }
+
+    // if this entry is valid, and it is either the first entry or
+    // the newEntries array doesn't already include an entry that
+    // matches this one based on the provided matchFn, then we add
+    // it to the beginning of our list
+    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
+      (newEntries.length === 0 ||
+        !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
+      newEntries.unshift(entry)
+    }
+  }
+
+  const newIndex = '\n' + newEntries.map((entry) => {
+    const stringified = JSON.stringify(entry)
+    const hash = hashEntry(stringified)
+    return `${hash}\t${stringified}`
+  }).join('\n')
+
+  const setup = async () => {
+    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+    await mkdir(path.dirname(target), { recursive: true })
+    return {
+      target,
+      moved: false,
+    }
+  }
+
+  const teardown = async (tmp) => {
+    if (!tmp.moved) {
+      return rm(tmp.target, { recursive: true, force: true })
+    }
+  }
+
+  const write = async (tmp) => {
+    await writeFile(tmp.target, newIndex, { flag: 'wx' })
+    await mkdir(path.dirname(bucket), { recursive: true })
+    // we use @npmcli/move-file directly here because we
+    // want to overwrite the existing file
+    await moveFile(tmp.target, bucket)
+    tmp.moved = true
+  }
+
+  // write the file atomically
+  const tmp = await setup()
+  try {
+    await write(tmp)
+  } finally {
+    await teardown(tmp)
+  }
+
+  // we reverse the list we generated such that the newest
+  // entries come first, in order to make looping through them easier.
+  // the true passed to formatEntry tells it to keep null
+  // integrity values; if they made it this far, it's because
+  // validateEntry returned true, and as such we should return them
+  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
+}
+
+module.exports.insert = insert
+
+async function insert (cache, key, integrity, opts = {}) {
+  const { metadata, size, time } = opts
+  const bucket = bucketPath(cache, key)
+  const entry = {
+    key,
+    integrity: integrity && ssri.stringify(integrity),
+    time: time || Date.now(),
+    size,
+    metadata,
+  }
+  try {
+    await mkdir(path.dirname(bucket), { recursive: true })
+    const stringified = JSON.stringify(entry)
+    // NOTE - Cleverness ahoy!
+    //
+    // This works because it's tremendously unlikely for an entry to corrupt
+    // another while still matching the checksum of the JSON in question.
+    // So, we just slap a hash of the entry in there and verify it on read.
+    //
+    // Thanks to @isaacs for the whiteboarding session that ended up with
+    // this.
+    await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return undefined
+    }
+
+    throw err
+  }
+  return formatEntry(cache, entry)
+}
+
+module.exports.find = find
+
+async function find (cache, key) {
+  const bucket = bucketPath(cache, key)
+  try {
+    const entries = await bucketEntries(bucket)
+    return entries.reduce((latest, next) => {
+      if (next && next.key === key) {
+        return formatEntry(cache, next)
+      } else {
+        return latest
+      }
+    }, null)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return null
+    } else {
+      throw err
+    }
+  }
+}
+
+module.exports.delete = del
+
+function del (cache, key, opts = {}) {
+  if (!opts.removeFully) {
+    return insert(cache, key, null, opts)
+  }
+
+  const bucket = bucketPath(cache, key)
+  return rm(bucket, { recursive: true, force: true })
+}
+
+module.exports.lsStream = lsStream
+
+function lsStream (cache) {
+  const indexDir = bucketDir(cache)
+  const stream = new Minipass({ objectMode: true })
+
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const buckets = await readdirOrEmpty(indexDir)
+    await Promise.all(buckets.map(async (bucket) => {
+      const bucketPath = path.join(indexDir, bucket)
+      const subbuckets = await readdirOrEmpty(bucketPath)
+      await Promise.all(subbuckets.map(async (subbucket) => {
+        const subbucketPath = path.join(bucketPath, subbucket)
+
+        // "/cachename//./*"
+        const subbucketEntries = await readdirOrEmpty(subbucketPath)
+        await Promise.all(subbucketEntries.map(async (entry) => {
+          const entryPath = path.join(subbucketPath, entry)
+          try {
+            const entries = await bucketEntries(entryPath)
+            // using a Map here ensures that when a key appears more than
+            // once, only its most recent entry survives
+            const reduced = entries.reduce((acc, entry) => {
+              acc.set(entry.key, entry)
+              return acc
+            }, new Map())
+            // reduced is a map of key => entry
+            for (const entry of reduced.values()) {
+              const formatted = formatEntry(cache, entry)
+              if (formatted) {
+                stream.write(formatted)
+              }
+            }
+          } catch (err) {
+            if (err.code === 'ENOENT') {
+              return undefined
+            }
+            throw err
+          }
+        }))
+      }))
+    }))
+    stream.end()
+    return stream
+  }).catch(err => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.ls = ls
+
+async function ls (cache) {
+  const entries = await lsStream(cache).collect()
+  return entries.reduce((acc, xs) => {
+    acc[xs.key] = xs
+    return acc
+  }, {})
+}
+
+module.exports.bucketEntries = bucketEntries
+
+async function bucketEntries (bucket, filter) {
+  const data = await readFile(bucket, 'utf8')
+  return _bucketEntries(data, filter)
+}
+
+function _bucketEntries (data, filter) {
+  const entries = []
+  data.split('\n').forEach((entry) => {
+    if (!entry) {
+      return
+    }
+
+    const pieces = entry.split('\t')
+    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
+      // Hash is no good! Corruption or malice? Doesn't matter!
+      // EJECT EJECT
+      return
+    }
+    let obj
+    try {
+      obj = JSON.parse(pieces[1])
+    } catch (_) {
+      // malformed JSON: leave obj undefined so the entry is skipped below
+    }
+    // coverage disabled here, no need to test with an entry that parses to something falsy
+    // istanbul ignore else
+    if (obj) {
+      entries.push(obj)
+    }
+  })
+  return entries
+}
+
+module.exports.bucketDir = bucketDir
+
+function bucketDir (cache) {
+  return path.join(cache, `index-v${indexV}`)
+}
+
+module.exports.bucketPath = bucketPath
+
+function bucketPath (cache, key) {
+  const hashed = hashKey(key)
+  return path.join.apply(
+    path,
+    [bucketDir(cache)].concat(hashToSegments(hashed))
+  )
+}
+
+module.exports.hashKey = hashKey
+
+function hashKey (key) {
+  return hash(key, 'sha256')
+}
+
+module.exports.hashEntry = hashEntry
+
+function hashEntry (str) {
+  return hash(str, 'sha1')
+}
+
+function hash (str, digest) {
+  return crypto
+    .createHash(digest)
+    .update(str)
+    .digest('hex')
+}
+
+function formatEntry (cache, entry, keepAll) {
+  // Treat null digests as deletions. They'll shadow any previous entries.
+  if (!entry.integrity && !keepAll) {
+    return null
+  }
+
+  return {
+    key: entry.key,
+    integrity: entry.integrity,
+    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
+    size: entry.size,
+    time: entry.time,
+    metadata: entry.metadata,
+  }
+}
+
+function readdirOrEmpty (dir) {
+  return readdir(dir).catch((err) => {
+    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
+      return []
+    }
+
+    throw err
+  })
+}
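
Each index bucket is an append-only log of `<sha1-of-json><TAB><json>` lines: insert() appends one, and bucketEntries() silently drops any line whose hash does not match its payload. A standalone sketch of producing and checking one such line:

    const crypto = require('crypto')

    const entry = { key: 'my-key', integrity: 'sha512-deadbeef', time: Date.now() }
    const stringified = JSON.stringify(entry)
    const hash = crypto.createHash('sha1').update(stringified).digest('hex')
    const line = `${hash}\t${stringified}`

    // on read: recompute and compare before trusting the JSON
    const [storedHash, payload] = line.split('\t')
    const ok = crypto.createHash('sha1').update(payload).digest('hex') === storedHash
    console.log(ok) // true
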
diff --git a/node_modules/sigstore/node_modules/cacache/lib/get.js b/node_modules/sigstore/node_modules/cacache/lib/get.js
new file mode 100644
index 0000000000000..80ec206c7ecaa
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/get.js
@@ -0,0 +1,170 @@
+'use strict'
+
+const Collect = require('minipass-collect')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const read = require('./content/read')
+
+async function getData (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return {
+      metadata: memoized.entry.metadata,
+      data: memoized.data,
+      integrity: memoized.entry.integrity,
+      size: memoized.entry.size,
+    }
+  }
+
+  const entry = await index.find(cache, key, opts)
+  if (!entry) {
+    throw new index.NotFoundError(cache, key)
+  }
+  const data = await read(cache, entry.integrity, { integrity, size })
+  if (memoize) {
+    memo.put(cache, entry, data, opts)
+  }
+
+  return {
+    data,
+    metadata: entry.metadata,
+    size: entry.size,
+    integrity: entry.integrity,
+  }
+}
+module.exports = getData
+
+async function getDataByDigest (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get.byDigest(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return memoized
+  }
+
+  const res = await read(cache, key, { integrity, size })
+  if (memoize) {
+    memo.put.byDigest(cache, key, res, opts)
+  }
+  return res
+}
+module.exports.byDigest = getDataByDigest
+
+const getMemoizedStream = (memoized) => {
+  const stream = new Minipass()
+  stream.on('newListener', function (ev, cb) {
+    ev === 'metadata' && cb(memoized.entry.metadata)
+    ev === 'integrity' && cb(memoized.entry.integrity)
+    ev === 'size' && cb(memoized.entry.size)
+  })
+  stream.end(memoized.data)
+  return stream
+}
+
+function getStream (cache, key, opts = {}) {
+  const { memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return getMemoizedStream(memoized)
+  }
+
+  const stream = new Pipeline()
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const entry = await index.find(cache, key)
+    if (!entry) {
+      throw new index.NotFoundError(cache, key)
+    }
+
+    stream.emit('metadata', entry.metadata)
+    stream.emit('integrity', entry.integrity)
+    stream.emit('size', entry.size)
+    stream.on('newListener', function (ev, cb) {
+      ev === 'metadata' && cb(entry.metadata)
+      ev === 'integrity' && cb(entry.integrity)
+      ev === 'size' && cb(entry.size)
+    })
+
+    const src = read.readStream(
+      cache,
+      entry.integrity,
+      { ...opts, size: typeof size !== 'number' ? entry.size : size }
+    )
+
+    if (memoize) {
+      const memoStream = new Collect.PassThrough()
+      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
+      stream.unshift(memoStream)
+    }
+    stream.unshift(src)
+    return stream
+  }).catch((err) => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.stream = getStream
+
+function getStreamDigest (cache, integrity, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get.byDigest(cache, integrity, opts)
+  if (memoized && memoize !== false) {
+    const stream = new Minipass()
+    stream.end(memoized)
+    return stream
+  } else {
+    const stream = read.readStream(cache, integrity, opts)
+    if (!memoize) {
+      return stream
+    }
+
+    const memoStream = new Collect.PassThrough()
+    memoStream.on('collect', data => memo.put.byDigest(
+      cache,
+      integrity,
+      data,
+      opts
+    ))
+    return new Pipeline(stream, memoStream)
+  }
+}
+
+module.exports.stream.byDigest = getStreamDigest
+
+function info (cache, key, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return Promise.resolve(memoized.entry)
+  } else {
+    return index.find(cache, key)
+  }
+}
+module.exports.info = info
+
+async function copy (cache, key, dest, opts = {}) {
+  const entry = await index.find(cache, key, opts)
+  if (!entry) {
+    throw new index.NotFoundError(cache, key)
+  }
+  await read.copy(cache, entry.integrity, dest, opts)
+  return {
+    metadata: entry.metadata,
+    size: entry.size,
+    integrity: entry.integrity,
+  }
+}
+
+module.exports.copy = copy
+
+async function copyByDigest (cache, key, dest, opts = {}) {
+  await read.copy(cache, key, dest, opts)
+  return key
+}
+
+module.exports.copy.byDigest = copyByDigest
+
+module.exports.hasContent = read.hasContent
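
Note that getStream() replays 'metadata', 'integrity', and 'size' to listeners attached after those events already fired, via the newListener hook above. A hedged usage sketch (cache path and key are examples):

    const cacache = require('cacache')

    const stream = cacache.get.stream('/tmp/my-cache', 'my-key')
    stream.on('metadata', (metadata) => console.log('metadata:', metadata))
    stream.on('integrity', (sri) => console.log('integrity:', String(sri)))
    stream.on('data', (chunk) => process.stdout.write(chunk))
    stream.on('error', console.error)
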
diff --git a/node_modules/sigstore/node_modules/cacache/lib/index.js b/node_modules/sigstore/node_modules/cacache/lib/index.js
new file mode 100644
index 0000000000000..c9b0da5f3a271
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/index.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const get = require('./get.js')
+const put = require('./put.js')
+const rm = require('./rm.js')
+const verify = require('./verify.js')
+const { clearMemoized } = require('./memoization.js')
+const tmp = require('./util/tmp.js')
+const index = require('./entry-index.js')
+
+module.exports.index = {}
+module.exports.index.compact = index.compact
+module.exports.index.insert = index.insert
+
+module.exports.ls = index.ls
+module.exports.ls.stream = index.lsStream
+
+module.exports.get = get
+module.exports.get.byDigest = get.byDigest
+module.exports.get.stream = get.stream
+module.exports.get.stream.byDigest = get.stream.byDigest
+module.exports.get.copy = get.copy
+module.exports.get.copy.byDigest = get.copy.byDigest
+module.exports.get.info = get.info
+module.exports.get.hasContent = get.hasContent
+
+module.exports.put = put
+module.exports.put.stream = put.stream
+
+module.exports.rm = rm.entry
+module.exports.rm.all = rm.all
+module.exports.rm.entry = module.exports.rm
+module.exports.rm.content = rm.content
+
+module.exports.clearMemoized = clearMemoized
+
+module.exports.tmp = {}
+module.exports.tmp.mkdir = tmp.mkdir
+module.exports.tmp.withTmp = tmp.withTmp
+
+module.exports.verify = verify
+module.exports.verify.lastRun = verify.lastRun
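
The wiring above is the whole public surface of the package. A round-trip through it looks like this (cache path and key are examples):

    const cacache = require('cacache')

    async function demo () {
      const cachePath = '/tmp/my-cache'
      const integrity = await cacache.put(cachePath, 'my-key', Buffer.from('hello'))
      const { data, metadata } = await cacache.get(cachePath, 'my-key')
      console.log(data.toString(), String(integrity), metadata)

      // content-addressed reads skip the index entirely
      const byDigest = await cacache.get.byDigest(cachePath, integrity)
      console.log(byDigest.toString())
    }

    demo().catch(console.error)
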
diff --git a/node_modules/sigstore/node_modules/cacache/lib/memoization.js b/node_modules/sigstore/node_modules/cacache/lib/memoization.js
new file mode 100644
index 0000000000000..0ff604a479c9c
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/memoization.js
@@ -0,0 +1,72 @@
+'use strict'
+
+const LRU = require('lru-cache')
+
+const MEMOIZED = new LRU({
+  max: 500,
+  maxSize: 50 * 1024 * 1024, // 50MB
+  ttl: 3 * 60 * 1000, // 3 minutes
+  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
+})
+
+module.exports.clearMemoized = clearMemoized
+
+function clearMemoized () {
+  const old = {}
+  MEMOIZED.forEach((v, k) => {
+    old[k] = v
+  })
+  MEMOIZED.clear()
+  return old
+}
+
+module.exports.put = put
+
+function put (cache, entry, data, opts) {
+  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
+  putDigest(cache, entry.integrity, data, opts)
+}
+
+module.exports.put.byDigest = putDigest
+
+function putDigest (cache, integrity, data, opts) {
+  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
+}
+
+module.exports.get = get
+
+function get (cache, key, opts) {
+  return pickMem(opts).get(`key:${cache}:${key}`)
+}
+
+module.exports.get.byDigest = getDigest
+
+function getDigest (cache, integrity, opts) {
+  return pickMem(opts).get(`digest:${cache}:${integrity}`)
+}
+
+class ObjProxy {
+  constructor (obj) {
+    this.obj = obj
+  }
+
+  get (key) {
+    return this.obj[key]
+  }
+
+  set (key, val) {
+    this.obj[key] = val
+  }
+}
+
+function pickMem (opts) {
+  if (!opts || !opts.memoize) {
+    return MEMOIZED
+  } else if (opts.memoize.get && opts.memoize.set) {
+    return opts.memoize
+  } else if (typeof opts.memoize === 'object') {
+    return new ObjProxy(opts.memoize)
+  } else {
+    return MEMOIZED
+  }
+}
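
pickMem() accepts anything exposing get/set methods, wraps a plain object in ObjProxy, and otherwise falls back to the shared LRU. A sketch of threading a caller-supplied scratch object through opts (the values here are illustrative, and the relative require assumes this module's location):

    const memo = require('./lib/memoization')

    // a plain object works: ObjProxy adapts it to get/set
    const scratch = {}
    const entry = { key: 'k', integrity: 'sha512-abc' }
    memo.put('/tmp/my-cache', entry, Buffer.from('hi'), { memoize: scratch })

    console.log(Object.keys(scratch))
    // ['key:/tmp/my-cache:k', 'digest:/tmp/my-cache:sha512-abc']
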
diff --git a/node_modules/sigstore/node_modules/cacache/lib/put.js b/node_modules/sigstore/node_modules/cacache/lib/put.js
new file mode 100644
index 0000000000000..9fc932d5f6dec
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/put.js
@@ -0,0 +1,80 @@
+'use strict'
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const write = require('./content/write')
+const Flush = require('minipass-flush')
+const { PassThrough } = require('minipass-collect')
+const Pipeline = require('minipass-pipeline')
+
+const putOpts = (opts) => ({
+  algorithms: ['sha512'],
+  ...opts,
+})
+
+module.exports = putData
+
+async function putData (cache, key, data, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  const res = await write(cache, data, opts)
+  const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
+  if (memoize) {
+    memo.put(cache, entry, data, opts)
+  }
+
+  return res.integrity
+}
+
+module.exports.stream = putStream
+
+function putStream (cache, key, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  let integrity
+  let size
+  let error
+
+  let memoData
+  const pipeline = new Pipeline()
+  // first item in the pipeline is the memoizer, because we need
+  // that to end first and get the collected data.
+  if (memoize) {
+    const memoizer = new PassThrough().on('collect', data => {
+      memoData = data
+    })
+    pipeline.push(memoizer)
+  }
+
+  // contentStream is write-only, not a passthrough;
+  // no data comes out of it.
+  const contentStream = write.stream(cache, opts)
+    .on('integrity', (int) => {
+      integrity = int
+    })
+    .on('size', (s) => {
+      size = s
+    })
+    .on('error', (err) => {
+      error = err
+    })
+
+  pipeline.push(contentStream)
+
+  // last but not least, we write the index and emit hash and size,
+  // and memoize if we're doing that
+  pipeline.push(new Flush({
+    async flush () {
+      if (!error) {
+        const entry = await index.insert(cache, key, integrity, { ...opts, size })
+        if (memoize && memoData) {
+          memo.put(cache, entry, memoData, opts)
+        }
+        pipeline.emit('integrity', integrity)
+        pipeline.emit('size', size)
+      }
+    },
+  }))
+
+  return pipeline
+}
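
putStream() returns a writable pipeline that emits 'integrity' and 'size' once the index entry has been written in the final Flush step. A hedged usage sketch (file and cache paths are examples):

    const fs = require('fs')
    const cacache = require('cacache')

    fs.createReadStream('/tmp/some-file')
      .pipe(cacache.put.stream('/tmp/my-cache', 'my-key'))
      .on('integrity', (sri) => console.log('stored as', String(sri)))
      .on('error', console.error)
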
diff --git a/node_modules/sigstore/node_modules/cacache/lib/rm.js b/node_modules/sigstore/node_modules/cacache/lib/rm.js
new file mode 100644
index 0000000000000..a94760c7cf243
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/rm.js
@@ -0,0 +1,31 @@
+'use strict'
+
+const { rm } = require('fs/promises')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const memo = require('./memoization')
+const path = require('path')
+const rmContent = require('./content/rm')
+
+module.exports = entry
+module.exports.entry = entry
+
+function entry (cache, key, opts) {
+  memo.clearMemoized()
+  return index.delete(cache, key, opts)
+}
+
+module.exports.content = content
+
+function content (cache, integrity) {
+  memo.clearMemoized()
+  return rmContent(cache, integrity)
+}
+
+module.exports.all = all
+
+async function all (cache) {
+  memo.clearMemoized()
+  const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
+  return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
+}
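
The pattern passed to glob here uses extglob syntax: *(content-*|index-*) matches the versioned content-vX and index-vX directories while leaving tmp and _lastverified untouched. A sketch of the same match in isolation, assuming glob's promise API as used above (the layout is hypothetical):

    const { glob } = require('glob')

    // hypothetical cache layout: content-v2/, index-v5/, tmp/, _lastverified
    glob('/tmp/my-cache/*(content-*|index-*)', { nosort: true })
      .then((paths) => console.log(paths))
      // expected: ['/tmp/my-cache/content-v2', '/tmp/my-cache/index-v5']
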
diff --git a/node_modules/sigstore/node_modules/cacache/lib/util/glob.js b/node_modules/sigstore/node_modules/cacache/lib/util/glob.js
new file mode 100644
index 0000000000000..8500c1c16a429
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/util/glob.js
@@ -0,0 +1,7 @@
+'use strict'
+
+const { glob } = require('glob')
+const path = require('path')
+
+const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
+module.exports = (path, options) => glob(globify(path), options)
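
globify() exists because glob patterns always use forward slashes, even on Windows, while cacache joins paths with the platform separator. For example:

    const path = require('path')

    const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)

    console.log(globify('C:\\cache\\content-v2\\**'))
    // C:/cache/content-v2/**
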
diff --git a/node_modules/sigstore/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/sigstore/node_modules/cacache/lib/util/hash-to-segments.js
new file mode 100644
index 0000000000000..445599b503808
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/util/hash-to-segments.js
@@ -0,0 +1,7 @@
+'use strict'
+
+module.exports = hashToSegments
+
+function hashToSegments (hash) {
+  return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
+}
diff --git a/node_modules/sigstore/node_modules/cacache/lib/util/tmp.js b/node_modules/sigstore/node_modules/cacache/lib/util/tmp.js
new file mode 100644
index 0000000000000..0bf5302136ebe
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/util/tmp.js
@@ -0,0 +1,26 @@
+'use strict'
+
+const { withTempDir } = require('@npmcli/fs')
+const fs = require('fs/promises')
+const path = require('path')
+
+module.exports.mkdir = mktmpdir
+
+async function mktmpdir (cache, opts = {}) {
+  const { tmpPrefix } = opts
+  const tmpDir = path.join(cache, 'tmp')
+  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
+  // do not use path.join(), it drops the trailing / if tmpPrefix is unset
+  const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
+  return fs.mkdtemp(target, { owner: 'inherit' })
+}
+
+module.exports.withTmp = withTmp
+
+function withTmp (cache, opts, cb) {
+  if (!cb) {
+    cb = opts
+    opts = {}
+  }
+  return withTempDir(path.join(cache, 'tmp'), cb, opts)
+}
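
The hand-built prefix in mktmpdir() matters because path.join() drops a trailing separator when tmpPrefix is empty, which would make mkdtemp create siblings of tmp/ instead of children. A quick illustration:

    const path = require('path')

    const tmpDir = '/tmp/my-cache/tmp'
    const tmpPrefix = undefined

    console.log(path.join(tmpDir, tmpPrefix || ''))
    // /tmp/my-cache/tmp        (mkdtemp would create /tmp/my-cache/tmpXXXXXX)
    console.log(`${tmpDir}${path.sep}${tmpPrefix || ''}`)
    // /tmp/my-cache/tmp/       (mkdtemp creates /tmp/my-cache/tmp/XXXXXX)
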
diff --git a/node_modules/sigstore/node_modules/cacache/lib/verify.js b/node_modules/sigstore/node_modules/cacache/lib/verify.js
new file mode 100644
index 0000000000000..62e85c946490f
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/lib/verify.js
@@ -0,0 +1,257 @@
+'use strict'
+
+const {
+  mkdir,
+  readFile,
+  rm,
+  stat,
+  truncate,
+  writeFile,
+} = require('fs/promises')
+const pMap = require('p-map')
+const contentPath = require('./content/path')
+const fsm = require('fs-minipass')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const path = require('path')
+const ssri = require('ssri')
+
+const hasOwnProperty = (obj, key) =>
+  Object.prototype.hasOwnProperty.call(obj, key)
+
+const verifyOpts = (opts) => ({
+  concurrency: 20,
+  log: { silly () {} },
+  ...opts,
+})
+
+module.exports = verify
+
+async function verify (cache, opts) {
+  opts = verifyOpts(opts)
+  opts.log.silly('verify', 'verifying cache at', cache)
+
+  const steps = [
+    markStartTime,
+    fixPerms,
+    garbageCollect,
+    rebuildIndex,
+    cleanTmp,
+    writeVerifile,
+    markEndTime,
+  ]
+
+  const stats = {}
+  for (const step of steps) {
+    const label = step.name
+    const start = new Date()
+    const s = await step(cache, opts)
+    if (s) {
+      Object.keys(s).forEach((k) => {
+        stats[k] = s[k]
+      })
+    }
+    const end = new Date()
+    if (!stats.runTime) {
+      stats.runTime = {}
+    }
+    stats.runTime[label] = end - start
+  }
+  stats.runTime.total = stats.endTime - stats.startTime
+  opts.log.silly(
+    'verify',
+    'verification finished for',
+    cache,
+    'in',
+    `${stats.runTime.total}ms`
+  )
+  return stats
+}
+
+async function markStartTime (cache, opts) {
+  return { startTime: new Date() }
+}
+
+async function markEndTime (cache, opts) {
+  return { endTime: new Date() }
+}
+
+async function fixPerms (cache, opts) {
+  opts.log.silly('verify', 'fixing cache permissions')
+  await mkdir(cache, { recursive: true })
+  return null
+}
+
+// Implements a naive mark-and-sweep tracing garbage collector.
+//
+// The algorithm is basically as follows:
+// 1. Read (and filter) all index entries ("pointers")
+// 2. Mark each integrity value as "live"
+// 3. Read entire filesystem tree in `content-vX/` dir
+// 4. If content is live, verify its checksum and delete it if it fails
+// 5. If content is not marked as live, rm it.
+//
+async function garbageCollect (cache, opts) {
+  opts.log.silly('verify', 'garbage collecting content')
+  const indexStream = index.lsStream(cache)
+  const liveContent = new Set()
+  indexStream.on('data', (entry) => {
+    if (opts.filter && !opts.filter(entry)) {
+      return
+    }
+
+    // integrity is stringified, re-parse it so we can get each hash
+    const integrity = ssri.parse(entry.integrity)
+    for (const algo in integrity) {
+      liveContent.add(integrity[algo].toString())
+    }
+  })
+  await new Promise((resolve, reject) => {
+    indexStream.on('end', resolve).on('error', reject)
+  })
+  const contentDir = contentPath.contentDir(cache)
+  const files = await glob(path.join(contentDir, '**'), {
+    follow: false,
+    nodir: true,
+    nosort: true,
+  })
+  const stats = {
+    verifiedContent: 0,
+    reclaimedCount: 0,
+    reclaimedSize: 0,
+    badContentCount: 0,
+    keptSize: 0,
+  }
+  await pMap(
+    files,
+    async (f) => {
+      const split = f.split(/[/\\]/)
+      const digest = split.slice(split.length - 3).join('')
+      const algo = split[split.length - 4]
+      const integrity = ssri.fromHex(digest, algo)
+      if (liveContent.has(integrity.toString())) {
+        const info = await verifyContent(f, integrity)
+        if (!info.valid) {
+          stats.reclaimedCount++
+          stats.badContentCount++
+          stats.reclaimedSize += info.size
+        } else {
+          stats.verifiedContent++
+          stats.keptSize += info.size
+        }
+      } else {
+        // No entries refer to this content. We can delete.
+        stats.reclaimedCount++
+        const s = await stat(f)
+        await rm(f, { recursive: true, force: true })
+        stats.reclaimedSize += s.size
+      }
+      return stats
+    },
+    { concurrency: opts.concurrency }
+  )
+  return stats
+}
+
+async function verifyContent (filepath, sri) {
+  const contentInfo = {}
+  try {
+    const { size } = await stat(filepath)
+    contentInfo.size = size
+    contentInfo.valid = true
+    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return { size: 0, valid: false }
+    }
+    if (err.code !== 'EINTEGRITY') {
+      throw err
+    }
+
+    await rm(filepath, { recursive: true, force: true })
+    contentInfo.valid = false
+  }
+  return contentInfo
+}
+
+async function rebuildIndex (cache, opts) {
+  opts.log.silly('verify', 'rebuilding index')
+  const entries = await index.ls(cache)
+  const stats = {
+    missingContent: 0,
+    rejectedEntries: 0,
+    totalEntries: 0,
+  }
+  const buckets = {}
+  for (const k in entries) {
+    /* istanbul ignore else */
+    if (hasOwnProperty(entries, k)) {
+      const hashed = index.hashKey(k)
+      const entry = entries[k]
+      const excluded = opts.filter && !opts.filter(entry)
+      excluded && stats.rejectedEntries++
+      if (buckets[hashed] && !excluded) {
+        buckets[hashed].push(entry)
+      } else if (buckets[hashed] && excluded) {
+        // skip
+      } else if (excluded) {
+        buckets[hashed] = []
+        buckets[hashed]._path = index.bucketPath(cache, k)
+      } else {
+        buckets[hashed] = [entry]
+        buckets[hashed]._path = index.bucketPath(cache, k)
+      }
+    }
+  }
+  await pMap(
+    Object.keys(buckets),
+    (key) => {
+      return rebuildBucket(cache, buckets[key], stats, opts)
+    },
+    { concurrency: opts.concurrency }
+  )
+  return stats
+}
+
+async function rebuildBucket (cache, bucket, stats, opts) {
+  await truncate(bucket._path)
+  // This needs to be serialized because cacache explicitly
+  // lets very racy bucket conflicts clobber each other.
+  for (const entry of bucket) {
+    const content = contentPath(cache, entry.integrity)
+    try {
+      await stat(content)
+      await index.insert(cache, entry.key, entry.integrity, {
+        metadata: entry.metadata,
+        size: entry.size,
+        time: entry.time,
+      })
+      stats.totalEntries++
+    } catch (err) {
+      if (err.code === 'ENOENT') {
+        stats.rejectedEntries++
+        stats.missingContent++
+      } else {
+        throw err
+      }
+    }
+  }
+}
+
+function cleanTmp (cache, opts) {
+  opts.log.silly('verify', 'cleaning tmp directory')
+  return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
+}
+
+async function writeVerifile (cache, opts) {
+  const verifile = path.join(cache, '_lastverified')
+  opts.log.silly('verify', 'writing verifile to ' + verifile)
+  return writeFile(verifile, `${Date.now()}`)
+}
+
+module.exports.lastRun = lastRun
+
+async function lastRun (cache) {
+  const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
+  return new Date(+data)
+}
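
verify() runs the steps above in order and merges whatever stats each one returns, so the result carries both per-step timings and the garbage-collection counters. A hedged usage sketch (cache path is an example):

    const cacache = require('cacache')

    cacache.verify('/tmp/my-cache').then((stats) => {
      console.log('verified', stats.verifiedContent, 'content blobs')
      console.log('reclaimed', stats.reclaimedSize, 'bytes')
      console.log('total time', stats.runTime.total, 'ms')
    })
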
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/LICENSE b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/LICENSE
new file mode 100644
index 0000000000000..97f8e32ed82e4
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/index.js b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
new file mode 100644
index 0000000000000..b6cdae8eb514b
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
@@ -0,0 +1,1028 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0;
+const proc = typeof process === 'object' && process
+    ? process
+    : {
+        stdout: null,
+        stderr: null,
+    };
+const events_1 = require("events");
+const stream_1 = __importDefault(require("stream"));
+const string_decoder_1 = require("string_decoder");
+/**
+ * Return true if the argument is a Minipass stream, Node stream, or something
+ * else that Minipass can interact with.
+ */
+const isStream = (s) => !!s &&
+    typeof s === 'object' &&
+    (s instanceof Minipass ||
+        s instanceof stream_1.default ||
+        (0, exports.isReadable)(s) ||
+        (0, exports.isWritable)(s));
+exports.isStream = isStream;
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+const isReadable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof events_1.EventEmitter &&
+    typeof s.pipe === 'function' &&
+    // node core Writable streams have a pipe() method, but it throws
+    s.pipe !== stream_1.default.Writable.prototype.pipe;
+exports.isReadable = isReadable;
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+const isWritable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof events_1.EventEmitter &&
+    typeof s.write === 'function' &&
+    typeof s.end === 'function';
+exports.isWritable = isWritable;
+const EOF = Symbol('EOF');
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
+const EMITTED_END = Symbol('emittedEnd');
+const EMITTING_END = Symbol('emittingEnd');
+const EMITTED_ERROR = Symbol('emittedError');
+const CLOSED = Symbol('closed');
+const READ = Symbol('read');
+const FLUSH = Symbol('flush');
+const FLUSHCHUNK = Symbol('flushChunk');
+const ENCODING = Symbol('encoding');
+const DECODER = Symbol('decoder');
+const FLOWING = Symbol('flowing');
+const PAUSED = Symbol('paused');
+const RESUME = Symbol('resume');
+const BUFFER = Symbol('buffer');
+const PIPES = Symbol('pipes');
+const BUFFERLENGTH = Symbol('bufferLength');
+const BUFFERPUSH = Symbol('bufferPush');
+const BUFFERSHIFT = Symbol('bufferShift');
+const OBJECTMODE = Symbol('objectMode');
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed');
+// internal event when stream has an error
+const ERROR = Symbol('error');
+const EMITDATA = Symbol('emitData');
+const EMITEND = Symbol('emitEnd');
+const EMITEND2 = Symbol('emitEnd2');
+const ASYNC = Symbol('async');
+const ABORT = Symbol('abort');
+const ABORTED = Symbol('aborted');
+const SIGNAL = Symbol('signal');
+const DATALISTENERS = Symbol('dataListeners');
+const DISCARDED = Symbol('discarded');
+const defer = (fn) => Promise.resolve().then(fn);
+const nodefer = (fn) => fn();
+const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
+const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
+    (!!b &&
+        typeof b === 'object' &&
+        b.constructor &&
+        b.constructor.name === 'ArrayBuffer' &&
+        b.byteLength >= 0);
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
+/**
+ * Internal class representing a pipe to a destination stream.
+ *
+ * @internal
+ */
+class Pipe {
+    src;
+    dest;
+    opts;
+    ondrain;
+    constructor(src, dest, opts) {
+        this.src = src;
+        this.dest = dest;
+        this.opts = opts;
+        this.ondrain = () => src[RESUME]();
+        this.dest.on('drain', this.ondrain);
+    }
+    unpipe() {
+        this.dest.removeListener('drain', this.ondrain);
+    }
+    // only here for the prototype
+    /* c8 ignore start */
+    proxyErrors(_er) { }
+    /* c8 ignore stop */
+    end() {
+        this.unpipe();
+        if (this.opts.end)
+            this.dest.end();
+    }
+}
+/**
+ * Internal class representing a pipe to a destination stream where
+ * errors are proxied.
+ *
+ * @internal
+ */
+class PipeProxyErrors extends Pipe {
+    unpipe() {
+        this.src.removeListener('error', this.proxyErrors);
+        super.unpipe();
+    }
+    constructor(src, dest, opts) {
+        super(src, dest, opts);
+        this.proxyErrors = er => dest.emit('error', er);
+        src.on('error', this.proxyErrors);
+    }
+}
+const isObjectModeOptions = (o) => !!o.objectMode;
+const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
+/**
+ * Main export, the Minipass class
+ *
+ * `RType` is the type of data emitted, defaults to Buffer
+ *
+ * `WType` is the type of data to be written, if RType is buffer or string,
+ * then any {@link Minipass.ContiguousData} is allowed.
+ *
+ * `Events` is the set of event handler signatures that this object
+ * will emit, see {@link Minipass.Events}
+ */
+class Minipass extends events_1.EventEmitter {
+    [FLOWING] = false;
+    [PAUSED] = false;
+    [PIPES] = [];
+    [BUFFER] = [];
+    [OBJECTMODE];
+    [ENCODING];
+    [ASYNC];
+    [DECODER];
+    [EOF] = false;
+    [EMITTED_END] = false;
+    [EMITTING_END] = false;
+    [CLOSED] = false;
+    [EMITTED_ERROR] = null;
+    [BUFFERLENGTH] = 0;
+    [DESTROYED] = false;
+    [SIGNAL];
+    [ABORTED] = false;
+    [DATALISTENERS] = 0;
+    [DISCARDED] = false;
+    /**
+     * true if the stream can be written
+     */
+    writable = true;
+    /**
+     * true if the stream can be read
+     */
+    readable = true;
+    /**
+     * If `RType` is Buffer, then options do not need to be provided.
+     * Otherwise, an options object must be provided to specify either
+     * {@link Minipass.SharedOptions.objectMode} or
+     * {@link Minipass.SharedOptions.encoding}, as appropriate.
+     */
+    constructor(...args) {
+        const options = (args[0] ||
+            {});
+        super();
+        if (options.objectMode && typeof options.encoding === 'string') {
+            throw new TypeError('Encoding and objectMode may not be used together');
+        }
+        if (isObjectModeOptions(options)) {
+            this[OBJECTMODE] = true;
+            this[ENCODING] = null;
+        }
+        else if (isEncodingOptions(options)) {
+            this[ENCODING] = options.encoding;
+            this[OBJECTMODE] = false;
+        }
+        else {
+            this[OBJECTMODE] = false;
+            this[ENCODING] = null;
+        }
+        this[ASYNC] = !!options.async;
+        this[DECODER] = this[ENCODING]
+            ? new string_decoder_1.StringDecoder(this[ENCODING])
+            : null;
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposeBuffer === true) {
+            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+        }
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposePipes === true) {
+            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+        }
+        const { signal } = options;
+        if (signal) {
+            this[SIGNAL] = signal;
+            if (signal.aborted) {
+                this[ABORT]();
+            }
+            else {
+                signal.addEventListener('abort', () => this[ABORT]());
+            }
+        }
+    }
+    /**
+     * The amount of data stored in the buffer waiting to be read.
+     *
+     * For Buffer streams, this will be the total byte length.
+     * For string encoding streams, this will be the string character length,
+     * according to JavaScript's `string.length` logic.
+     * For objectMode streams, this is a count of the items waiting to be
+     * emitted.
+     */
+    get bufferLength() {
+        return this[BUFFERLENGTH];
+    }
+    /**
+     * The `BufferEncoding` currently in use, or `null`
+     */
+    get encoding() {
+        return this[ENCODING];
+    }
+    /**
+     * @deprecated - This is a read only property
+     */
+    set encoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * @deprecated - Encoding may only be set at instantiation time
+     */
+    setEncoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * True if this is an objectMode stream
+     */
+    get objectMode() {
+        return this[OBJECTMODE];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set objectMode(_om) {
+        throw new Error('objectMode must be set at instantiation time');
+    }
+    /**
+     * true if this is an async stream
+     */
+    get ['async']() {
+        return this[ASYNC];
+    }
+    /**
+     * Set to true to make this stream async.
+     *
+     * Once set, it cannot be unset, as this would potentially cause incorrect
+     * behavior. I.e., a sync stream can be made async, but an async stream
+     * cannot be safely made sync.
+     */
+    set ['async'](a) {
+        this[ASYNC] = this[ASYNC] || !!a;
+    }
+    // drop everything and get out of the flow completely
+    [ABORT]() {
+        this[ABORTED] = true;
+        this.emit('abort', this[SIGNAL]?.reason);
+        this.destroy(this[SIGNAL]?.reason);
+    }
+    /**
+     * True if the stream has been aborted.
+     */
+    get aborted() {
+        return this[ABORTED];
+    }
+    /**
+     * No-op setter. Stream aborted status is set via the AbortSignal provided
+     * in the constructor options.
+     */
+    set aborted(_) { }
+    write(chunk, encoding, cb) {
+        if (this[ABORTED])
+            return false;
+        if (this[EOF])
+            throw new Error('write after end');
+        if (this[DESTROYED]) {
+            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+            return true;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (!encoding)
+            encoding = 'utf8';
+        const fn = this[ASYNC] ? defer : nodefer;
+        // convert array buffers and typed array views into buffers
+        // at some point in the future, we may want to do the opposite!
+        // leave strings and buffers as-is
+        // anything else is only allowed in object mode, so throw
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance, fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater than the amount of data in the internal buffer,
+     * then `null` is returned.
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ? this[BUFFER].join('')
+                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+            ];
+        }
+        const ret = this[READ](n || null, this[BUFFER][0]);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
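+    // [Editor's addition, not part of the upstream source: a usage sketch
+    // of read(), assuming a string-encoding stream.]
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.write('hello')
+    //   mp.read(2)  // => 'he' (consumes 2 characters)
+    //   mp.read()   // => 'llo' (drains the remaining buffer)
+    //   mp.read(10) // => null (more than is currently buffered)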
+    [READ](n, chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERSHIFT]();
+        else {
+            const c = chunk;
+            if (n === c.length || n === null)
+                this[BUFFERSHIFT]();
+            else if (typeof c === 'string') {
+                this[BUFFER][0] = c.slice(n);
+                chunk = c.slice(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+            else {
+                this[BUFFER][0] = c.subarray(n);
+                chunk = c.subarray(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+        }
+        this.emit('data', chunk);
+        if (!this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+        return chunk;
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (chunk !== undefined)
+            this.write(chunk, encoding);
+        if (cb)
+            this.once('end', cb);
+        this[EOF] = true;
+        this.writable = false;
+        // if we haven't written anything, then go ahead and emit,
+        // even if we're not reading.
+        // we'll re-emit if a new 'end' listener is added anyway.
+        // This makes MP more suitable to write-only use cases.
+        if (this[FLOWING] || !this[PAUSED])
+            this[MAYBE_EMIT_END]();
+        return this;
+    }
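+    // [Editor's addition: a write-only usage sketch of end().]
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.write('foo')
+    //   mp.end('bar', () => console.log('ended'))
+    //   // 'end' (and the callback) is deferred until 'foo' and 'bar'
+    //   // are read or discarded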
+    // don't let the internal resume be overwritten
+    [RESUME]() {
+        if (this[DESTROYED])
+            return;
+        if (!this[DATALISTENERS] && !this[PIPES].length) {
+            this[DISCARDED] = true;
+        }
+        this[PAUSED] = false;
+        this[FLOWING] = true;
+        this.emit('resume');
+        if (this[BUFFER].length)
+            this[FLUSH]();
+        else if (this[EOF])
+            this[MAYBE_EMIT_END]();
+        else
+            this.emit('drain');
+    }
+    /**
+     * Resume the stream if it is currently in a paused state
+     *
+     * If called when there are no pipe destinations or `data` event listeners,
+     * this will place the stream in a "discarded" state, where all data will
+     * be thrown away. The discarded state is removed if a pipe destination or
+     * data handler is added, if pause() is called, or if any synchronous or
+     * asynchronous iteration is started.
+     */
+    resume() {
+        return this[RESUME]();
+    }
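+    // [Editor's addition: sketch of the "discarded" state described above.
+    // Resuming with no consumers throws written data away.]
+    //
+    //   const mp = new Minipass()
+    //   mp.resume()                     // no pipes or 'data' listeners:
+    //                                   // stream is discarded
+    //   mp.write(Buffer.from('lost'))   // emitted into the void
+    //   mp.on('data', c => console.log(c)) // clears the discarded state,
+    //                                      // later writes are delivered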
+    /**
+     * Pause the stream
+     */
+    pause() {
+        this[FLOWING] = false;
+        this[PAUSED] = true;
+        this[DISCARDED] = false;
+    }
+    /**
+     * true if the stream has been forcibly destroyed
+     */
+    get destroyed() {
+        return this[DESTROYED];
+    }
+    /**
+     * true if the stream is currently in a flowing state, meaning that
+     * any writes will be immediately emitted.
+     */
+    get flowing() {
+        return this[FLOWING];
+    }
+    /**
+     * true if the stream is currently in a paused state
+     */
+    get paused() {
+        return this[PAUSED];
+    }
+    [BUFFERPUSH](chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] += 1;
+        else
+            this[BUFFERLENGTH] += chunk.length;
+        this[BUFFER].push(chunk);
+    }
+    [BUFFERSHIFT]() {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] -= 1;
+        else
+            this[BUFFERLENGTH] -= this[BUFFER][0].length;
+        return this[BUFFER].shift();
+    }
+    [FLUSH](noDrain = false) {
+        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
+            this[BUFFER].length);
+        if (!noDrain && !this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+    }
+    [FLUSHCHUNK](chunk) {
+        this.emit('data', chunk);
+        return this[FLOWING];
+    }
+    /**
+     * Pipe all data emitted by this stream into the destination provided.
+     *
+     * Triggers the flow of data.
+     */
+    pipe(dest, opts) {
+        if (this[DESTROYED])
+            return dest;
+        this[DISCARDED] = false;
+        const ended = this[EMITTED_END];
+        opts = opts || {};
+        if (dest === proc.stdout || dest === proc.stderr)
+            opts.end = false;
+        else
+            opts.end = opts.end !== false;
+        opts.proxyErrors = !!opts.proxyErrors;
+        // piping an ended stream ends immediately
+        if (ended) {
+            if (opts.end)
+                dest.end();
+        }
+        else {
+            // "as" here just ignores the WType, which pipes don't care about,
+            // since they're only consuming from us, and writing to the dest
+            this[PIPES].push(!opts.proxyErrors
+                ? new Pipe(this, dest, opts)
+                : new PipeProxyErrors(this, dest, opts));
+            if (this[ASYNC])
+                defer(() => this[RESUME]());
+            else
+                this[RESUME]();
+        }
+        return dest;
+    }
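+    // [Editor's addition: a pipe() usage sketch. Note the special case
+    // above: piping to stdout/stderr never ends the destination.]
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.pipe(process.stdout) // opts.end forced to false for stdout
+    //   mp.end('hello\n')       // written through, stdout stays open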
+    /**
+     * Fully unhook a piped destination stream.
+     *
+     * If the destination stream was the only consumer of this stream (ie,
+     * there are no other piped destinations or `'data'` event listeners)
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    unpipe(dest) {
+        const p = this[PIPES].find(p => p.dest === dest);
+        if (p) {
+            if (this[PIPES].length === 1) {
+                if (this[FLOWING] && this[DATALISTENERS] === 0) {
+                    this[FLOWING] = false;
+                }
+                this[PIPES] = [];
+            }
+            else
+                this[PIPES].splice(this[PIPES].indexOf(p), 1);
+            p.unpipe();
+        }
+    }
+    /**
+     * Alias for {@link Minipass#on}
+     */
+    addListener(ev, handler) {
+        return this.on(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.on`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * - Adding a 'data' event handler will trigger the flow of data
+     *
+     * - Adding a 'readable' event handler when there is data waiting to be read
+     *   will cause 'readable' to be emitted immediately.
+     *
+     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
+     *   already passed will cause the event to be emitted immediately and all
+     *   handlers removed.
+     *
+     * - Adding an 'error' event handler after an error has been emitted will
+     *   cause the event to be re-emitted immediately with the error previously
+     *   raised.
+     */
+    on(ev, handler) {
+        const ret = super.on(ev, handler);
+        if (ev === 'data') {
+            this[DISCARDED] = false;
+            this[DATALISTENERS]++;
+            if (!this[PIPES].length && !this[FLOWING]) {
+                this[RESUME]();
+            }
+        }
+        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
+            super.emit('readable');
+        }
+        else if (isEndish(ev) && this[EMITTED_END]) {
+            super.emit(ev);
+            this.removeAllListeners(ev);
+        }
+        else if (ev === 'error' && this[EMITTED_ERROR]) {
+            const h = handler;
+            if (this[ASYNC])
+                defer(() => h.call(this, this[EMITTED_ERROR]));
+            else
+                h.call(this, this[EMITTED_ERROR]);
+        }
+        return ret;
+    }
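+    // [Editor's addition: sketch of the flow-triggering behavior above.]
+    //
+    //   const mp = new Minipass({ objectMode: true })
+    //   mp.write({ a: 1 })                 // buffered; not flowing yet
+    //   mp.on('data', o => console.log(o)) // attaching the handler starts
+    //                                      // the flow; { a: 1 } arrives here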
+    /**
+     * Alias for {@link Minipass#off}
+     */
+    removeListener(ev, handler) {
+        return this.off(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.off`
+     *
+     * If a 'data' event handler is removed, and it was the last consumer
+     * (ie, there are no pipe destinations or other 'data' event listeners),
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    off(ev, handler) {
+        const ret = super.off(ev, handler);
+        // if we previously had listeners, and now we don't, and we don't
+        // have any pipes, then stop the flow, unless it's been explicitly
+        // put in a discarded flowing state via stream.resume().
+        if (ev === 'data') {
+            this[DATALISTENERS] = this.listeners('data').length;
+            if (this[DATALISTENERS] === 0 &&
+                !this[DISCARDED] &&
+                !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * Mostly identical to `EventEmitter.removeAllListeners`
+     *
+     * If all 'data' event handlers are removed, and they were the last consumer
+     * (ie, there are no pipe destinations), then the flow of data will stop
+     * until there is another consumer or {@link Minipass#resume} is explicitly
+     * called.
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ? super.emit('error', data)
+                : false;
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'resume') {
+            const ret = super.emit('resume');
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'finish' || ev === 'prefinish') {
+            const ret = super.emit(ev);
+            this.removeAllListeners(ev);
+            return ret;
+        }
+        // Some other unknown event
+        const ret = super.emit(ev, ...args);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
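+    // [Editor's addition: sketch of the 'close' deferral described above.]
+    //
+    //   const mp = new Minipass()
+    //   mp.on('close', () => console.log('closed'))
+    //   mp.emit('close') // returns false: deferred until after 'end'
+    //   mp.end()         // emits 'end', then the pending 'close' fires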
+    [EMITDATA](data) {
+        for (const p of this[PIPES]) {
+            if (p.dest.write(data) === false)
+                this.pause();
+        }
+        const ret = this[DISCARDED] ? false : super.emit('data', data);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITEND]() {
+        if (this[EMITTED_END])
+            return false;
+        this[EMITTED_END] = true;
+        this.readable = false;
+        return this[ASYNC]
+            ? (defer(() => this[EMITEND2]()), true)
+            : this[EMITEND2]();
+    }
+    [EMITEND2]() {
+        if (this[DECODER]) {
+            const data = this[DECODER].end();
+            if (data) {
+                for (const p of this[PIPES]) {
+                    p.dest.write(data);
+                }
+                if (!this[DISCARDED])
+                    super.emit('data', data);
+            }
+        }
+        for (const p of this[PIPES]) {
+            p.end();
+        }
+        const ret = super.emit('end');
+        this.removeAllListeners('end');
+        return ret;
+    }
+    /**
+     * Return a Promise that resolves to an array of all emitted data once
+     * the stream ends.
+     */
+    async collect() {
+        const buf = Object.assign([], {
+            dataLength: 0,
+        });
+        if (!this[OBJECTMODE])
+            buf.dataLength = 0;
+        // set the promise first, in case an error is raised
+        // by triggering the flow here.
+        const p = this.promise();
+        this.on('data', c => {
+            buf.push(c);
+            if (!this[OBJECTMODE])
+                buf.dataLength += c.length;
+        });
+        await p;
+        return buf;
+    }
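+    // [Editor's addition: collect() usage sketch.]
+    //
+    //   const mp = new Minipass({ objectMode: true })
+    //   mp.write(1); mp.write(2); mp.end(3)
+    //   const items = await mp.collect() // => [1, 2, 3]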
+    /**
+     * Return a Promise that resolves to the concatenation of all emitted data
+     * once the stream ends.
+     *
+     * Not allowed on objectMode streams.
+     */
+    async concat() {
+        if (this[OBJECTMODE]) {
+            throw new Error('cannot concat in objectMode');
+        }
+        const buf = await this.collect();
+        return (this[ENCODING]
+            ? buf.join('')
+            : Buffer.concat(buf, buf.dataLength));
+    }
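+    // [Editor's addition: concat() usage sketch for an encoding stream.]
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.write('foo'); mp.end('bar')
+    //   await mp.concat() // => 'foobar' (single string, not an array)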
+    /**
+     * Return a void Promise that resolves once the stream ends.
+     */
+    async promise() {
+        return new Promise((resolve, reject) => {
+            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
+            this.on('error', er => reject(er));
+            this.on('end', () => resolve());
+        });
+    }
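+    // [Editor's addition: promise() usage sketch. Note that promise()
+    // only observes the stream; it does not consume buffered data.]
+    //
+    //   const mp = new Minipass()
+    //   mp.end(Buffer.from('x'))
+    //   mp.resume()        // discard the data so 'end' can actually fire
+    //   await mp.promise() // resolves on 'end', rejects on error/destroy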
+    /**
+     * Asynchronous `for await of` iteration.
+     *
+     * This will continue emitting all chunks until the stream terminates.
+     */
+    [Symbol.asyncIterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = async () => {
+            this.pause();
+            stopped = true;
+            return { value: undefined, done: true };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const res = this.read();
+            if (res !== null)
+                return Promise.resolve({ done: false, value: res });
+            if (this[EOF])
+                return stop();
+            let resolve;
+            let reject;
+            const onerr = (er) => {
+                this.off('data', ondata);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                reject(er);
+            };
+            const ondata = (value) => {
+                this.off('error', onerr);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                this.pause();
+                resolve({ value, done: !!this[EOF] });
+            };
+            const onend = () => {
+                this.off('error', onerr);
+                this.off('data', ondata);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                resolve({ done: true, value: undefined });
+            };
+            const ondestroy = () => onerr(new Error('stream destroyed'));
+            return new Promise((res, rej) => {
+                reject = rej;
+                resolve = res;
+                this.once(DESTROYED, ondestroy);
+                this.once('error', onerr);
+                this.once('end', onend);
+                this.once('data', ondata);
+            });
+        };
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+        };
+    }
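+    // [Editor's addition: async iteration usage sketch.]
+    //
+    //   const mp = new Minipass({ objectMode: true })
+    //   mp.write(1); mp.write(2); mp.end(3)
+    //   for await (const item of mp) console.log(item) // 1, then 2, then 3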
+    /**
+     * Synchronous `for of` iteration.
+     *
+     * The iteration will terminate when the internal buffer runs out, even
+     * if the stream has not yet terminated.
+     */
+    [Symbol.iterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = () => {
+            this.pause();
+            this.off(ERROR, stop);
+            this.off(DESTROYED, stop);
+            this.off('end', stop);
+            stopped = true;
+            return { done: true, value: undefined };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const value = this.read();
+            return value === null ? stop() : { done: false, value };
+        };
+        this.once('end', stop);
+        this.once(ERROR, stop);
+        this.once(DESTROYED, stop);
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.iterator]() {
+                return this;
+            },
+        };
+    }
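+    // [Editor's addition: sync iteration usage sketch. Unlike the async
+    // form, this only yields what is already buffered.]
+    //
+    //   const mp = new Minipass({ objectMode: true })
+    //   mp.write('a'); mp.write('b') // buffered, stream not ended
+    //   [...mp]                      // => ['a', 'b'], then iteration stops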
+    /**
+     * Destroy a stream, preventing it from being used for any further purpose.
+     *
+     * If the stream has a `close()` method, then it will be called on
+     * destruction.
+     *
+     * After destruction, any attempt to write data, read data, or emit most
+     * events will be ignored.
+     *
+     * If an error argument is provided, then it will be emitted in an
+     * 'error' event.
+     */
+    destroy(er) {
+        if (this[DESTROYED]) {
+            if (er)
+                this.emit('error', er);
+            else
+                this.emit(DESTROYED);
+            return this;
+        }
+        this[DESTROYED] = true;
+        this[DISCARDED] = true;
+        // throw away all buffered data, it's never coming out
+        this[BUFFER].length = 0;
+        this[BUFFERLENGTH] = 0;
+        const wc = this;
+        if (typeof wc.close === 'function' && !this[CLOSED])
+            wc.close();
+        if (er)
+            this.emit('error', er);
+        // if no error to emit, still reject pending promises
+        else
+            this.emit(DESTROYED);
+        return this;
+    }
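+    // [Editor's addition: destroy() usage sketch.]
+    //
+    //   const mp = new Minipass()
+    //   mp.on('error', er => console.error(er.message))
+    //   mp.write(Buffer.from('pending')) // buffered data will be dropped
+    //   mp.destroy(new Error('oops'))    // drops buffer, emits 'error'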
+    /**
+     * Alias for {@link isStream}
+     *
+     * Former export location, maintained for backwards compatibility.
+     *
+     * @deprecated
+     */
+    static get isStream() {
+        return exports.isStream;
+    }
+}
+exports.Minipass = Minipass;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/package.json b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/index.js b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
new file mode 100644
index 0000000000000..b65fafbae43a4
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
@@ -0,0 +1,1018 @@
+const proc = typeof process === 'object' && process
+    ? process
+    : {
+        stdout: null,
+        stderr: null,
+    };
+import { EventEmitter } from 'events';
+import Stream from 'stream';
+import { StringDecoder } from 'string_decoder';
+/**
+ * Return true if the argument is a Minipass stream, Node stream, or something
+ * else that Minipass can interact with.
+ */
+export const isStream = (s) => !!s &&
+    typeof s === 'object' &&
+    (s instanceof Minipass ||
+        s instanceof Stream ||
+        isReadable(s) ||
+        isWritable(s));
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+export const isReadable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof EventEmitter &&
+    typeof s.pipe === 'function' &&
+    // node core Writable streams have a pipe() method, but it throws
+    s.pipe !== Stream.Writable.prototype.pipe;
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+export const isWritable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof EventEmitter &&
+    typeof s.write === 'function' &&
+    typeof s.end === 'function';
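+// [Editor's addition: sketch of the guards above.]
+//
+//   isStream(new Minipass())     // true (Minipass instance)
+//   isStream(process.stdout)     // true (node core stream)
+//   isWritable(process.stdout)   // true
+//   isStream({ pipe: () => {} }) // false (not an EventEmitter)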
+const EOF = Symbol('EOF');
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
+const EMITTED_END = Symbol('emittedEnd');
+const EMITTING_END = Symbol('emittingEnd');
+const EMITTED_ERROR = Symbol('emittedError');
+const CLOSED = Symbol('closed');
+const READ = Symbol('read');
+const FLUSH = Symbol('flush');
+const FLUSHCHUNK = Symbol('flushChunk');
+const ENCODING = Symbol('encoding');
+const DECODER = Symbol('decoder');
+const FLOWING = Symbol('flowing');
+const PAUSED = Symbol('paused');
+const RESUME = Symbol('resume');
+const BUFFER = Symbol('buffer');
+const PIPES = Symbol('pipes');
+const BUFFERLENGTH = Symbol('bufferLength');
+const BUFFERPUSH = Symbol('bufferPush');
+const BUFFERSHIFT = Symbol('bufferShift');
+const OBJECTMODE = Symbol('objectMode');
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed');
+// internal event when stream has an error
+const ERROR = Symbol('error');
+const EMITDATA = Symbol('emitData');
+const EMITEND = Symbol('emitEnd');
+const EMITEND2 = Symbol('emitEnd2');
+const ASYNC = Symbol('async');
+const ABORT = Symbol('abort');
+const ABORTED = Symbol('aborted');
+const SIGNAL = Symbol('signal');
+const DATALISTENERS = Symbol('dataListeners');
+const DISCARDED = Symbol('discarded');
+const defer = (fn) => Promise.resolve().then(fn);
+const nodefer = (fn) => fn();
+const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
+const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
+    (!!b &&
+        typeof b === 'object' &&
+        b.constructor &&
+        b.constructor.name === 'ArrayBuffer' &&
+        b.byteLength >= 0);
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
+/**
+ * Internal class representing a pipe to a destination stream.
+ *
+ * @internal
+ */
+class Pipe {
+    src;
+    dest;
+    opts;
+    ondrain;
+    constructor(src, dest, opts) {
+        this.src = src;
+        this.dest = dest;
+        this.opts = opts;
+        this.ondrain = () => src[RESUME]();
+        this.dest.on('drain', this.ondrain);
+    }
+    unpipe() {
+        this.dest.removeListener('drain', this.ondrain);
+    }
+    // only here for the prototype
+    /* c8 ignore start */
+    proxyErrors(_er) { }
+    /* c8 ignore stop */
+    end() {
+        this.unpipe();
+        if (this.opts.end)
+            this.dest.end();
+    }
+}
+/**
+ * Internal class representing a pipe to a destination stream where
+ * errors are proxied.
+ *
+ * @internal
+ */
+class PipeProxyErrors extends Pipe {
+    unpipe() {
+        this.src.removeListener('error', this.proxyErrors);
+        super.unpipe();
+    }
+    constructor(src, dest, opts) {
+        super(src, dest, opts);
+        this.proxyErrors = er => dest.emit('error', er);
+        src.on('error', this.proxyErrors);
+    }
+}
+const isObjectModeOptions = (o) => !!o.objectMode;
+const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
+/**
+ * Main export, the Minipass class
+ *
+ * `RType` is the type of data emitted, defaults to Buffer
+ *
+ * `WType` is the type of data to be written, if RType is buffer or string,
+ * then any {@link Minipass.ContiguousData} is allowed.
+ *
+ * `Events` is the set of event handler signatures that this object
+ * will emit, see {@link Minipass.Events}
+ */
+export class Minipass extends EventEmitter {
+    [FLOWING] = false;
+    [PAUSED] = false;
+    [PIPES] = [];
+    [BUFFER] = [];
+    [OBJECTMODE];
+    [ENCODING];
+    [ASYNC];
+    [DECODER];
+    [EOF] = false;
+    [EMITTED_END] = false;
+    [EMITTING_END] = false;
+    [CLOSED] = false;
+    [EMITTED_ERROR] = null;
+    [BUFFERLENGTH] = 0;
+    [DESTROYED] = false;
+    [SIGNAL];
+    [ABORTED] = false;
+    [DATALISTENERS] = 0;
+    [DISCARDED] = false;
+    /**
+     * true if the stream can be written
+     */
+    writable = true;
+    /**
+     * true if the stream can be read
+     */
+    readable = true;
+    /**
+     * If `RType` is Buffer, then options do not need to be provided.
+     * Otherwise, an options object must be provided to specify either
+     * {@link Minipass.SharedOptions.objectMode} or
+     * {@link Minipass.SharedOptions.encoding}, as appropriate.
+     */
+    constructor(...args) {
+        const options = (args[0] ||
+            {});
+        super();
+        if (options.objectMode && typeof options.encoding === 'string') {
+            throw new TypeError('Encoding and objectMode may not be used together');
+        }
+        if (isObjectModeOptions(options)) {
+            this[OBJECTMODE] = true;
+            this[ENCODING] = null;
+        }
+        else if (isEncodingOptions(options)) {
+            this[ENCODING] = options.encoding;
+            this[OBJECTMODE] = false;
+        }
+        else {
+            this[OBJECTMODE] = false;
+            this[ENCODING] = null;
+        }
+        this[ASYNC] = !!options.async;
+        this[DECODER] = this[ENCODING]
+            ? new StringDecoder(this[ENCODING])
+            : null;
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposeBuffer === true) {
+            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+        }
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposePipes === true) {
+            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+        }
+        const { signal } = options;
+        if (signal) {
+            this[SIGNAL] = signal;
+            if (signal.aborted) {
+                this[ABORT]();
+            }
+            else {
+                signal.addEventListener('abort', () => this[ABORT]());
+            }
+        }
+    }
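+    // [Editor's addition: the three constructor configurations described
+    // above.]
+    //
+    //   new Minipass()                     // emits Buffers
+    //   new Minipass({ encoding: 'utf8' }) // emits decoded strings
+    //   new Minipass({ objectMode: true }) // emits arbitrary values
+    //   new Minipass({ objectMode: true, encoding: 'utf8' }) // TypeError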
+    /**
+     * The amount of data stored in the buffer waiting to be read.
+     *
+     * For Buffer streams, this will be the total byte length.
+     * For string encoding streams, this will be the string character length,
+     * according to JavaScript's `string.length` logic.
+     * For objectMode streams, this is a count of the items waiting to be
+     * emitted.
+     */
+    get bufferLength() {
+        return this[BUFFERLENGTH];
+    }
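+    // [Editor's addition: bufferLength usage sketch.]
+    //
+    //   const mp = new Minipass()
+    //   mp.write(Buffer.from('abc'))
+    //   mp.bufferLength // => 3 (bytes buffered, awaiting a reader)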
+    /**
+     * The `BufferEncoding` currently in use, or `null`
+     */
+    get encoding() {
+        return this[ENCODING];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set encoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * @deprecated - Encoding may only be set at instantiation time
+     */
+    setEncoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * True if this is an objectMode stream
+     */
+    get objectMode() {
+        return this[OBJECTMODE];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set objectMode(_om) {
+        throw new Error('objectMode must be set at instantiation time');
+    }
+    /**
+     * true if this is an async stream
+     */
+    get ['async']() {
+        return this[ASYNC];
+    }
+    /**
+     * Set to true to make this stream async.
+     *
+     * Once set, it cannot be unset, as this would potentially cause incorrect
+     * behavior.  Ie, a sync stream can be made async, but an async stream
+     * cannot be safely made sync.
+     */
+    set ['async'](a) {
+        this[ASYNC] = this[ASYNC] || !!a;
+    }
+    // drop everything and get out of the flow completely
+    [ABORT]() {
+        this[ABORTED] = true;
+        this.emit('abort', this[SIGNAL]?.reason);
+        this.destroy(this[SIGNAL]?.reason);
+    }
+    /**
+     * True if the stream has been aborted.
+     */
+    get aborted() {
+        return this[ABORTED];
+    }
+    /**
+     * No-op setter. Stream aborted status is set via the AbortSignal provided
+     * in the constructor options.
+     */
+    set aborted(_) { }
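+    // [Editor's addition: AbortSignal usage sketch. With a signal present,
+    // an unhandled 'error' is swallowed, matching node core behavior.]
+    //
+    //   const ac = new AbortController()
+    //   const mp = new Minipass({ signal: ac.signal })
+    //   mp.on('abort', reason => console.log('aborted:', reason))
+    //   ac.abort(new Error('cancelled')) // destroys mp, mp.aborted === true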
+    write(chunk, encoding, cb) {
+        if (this[ABORTED])
+            return false;
+        if (this[EOF])
+            throw new Error('write after end');
+        if (this[DESTROYED]) {
+            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+            return true;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (!encoding)
+            encoding = 'utf8';
+        const fn = this[ASYNC] ? defer : nodefer;
+        // convert array buffers and typed array views into buffers
+        // at some point in the future, we may want to do the opposite!
+        // leave strings and buffers as-is
+        // anything else is only allowed in object mode, so throw
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance and fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
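+    // [Editor's addition: write() return value sketch. The boolean mirrors
+    // node streams: false means "not flowing", i.e. the chunk was buffered.]
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.write('a')           // => false (no consumer, chunk buffered)
+    //   mp.on('data', () => {}) // starts the flow
+    //   mp.write('b')           // => true (emitted straight through)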
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater than the amount of data in the internal buffer,
+     * then `null` is returned.
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ? this[BUFFER].join('')
+                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+            ];
+        }
+        const ret = this[READ](n || null, this[BUFFER][0]);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [READ](n, chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERSHIFT]();
+        else {
+            const c = chunk;
+            if (n === c.length || n === null)
+                this[BUFFERSHIFT]();
+            else if (typeof c === 'string') {
+                this[BUFFER][0] = c.slice(n);
+                chunk = c.slice(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+            else {
+                this[BUFFER][0] = c.subarray(n);
+                chunk = c.subarray(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+        }
+        this.emit('data', chunk);
+        if (!this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+        return chunk;
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (chunk !== undefined)
+            this.write(chunk, encoding);
+        if (cb)
+            this.once('end', cb);
+        this[EOF] = true;
+        this.writable = false;
+        // if we haven't written anything, then go ahead and emit,
+        // even if we're not reading.
+        // we'll re-emit if a new 'end' listener is added anyway.
+        // This makes MP more suitable to write-only use cases.
+        if (this[FLOWING] || !this[PAUSED])
+            this[MAYBE_EMIT_END]();
+        return this;
+    }
+    // don't let the internal resume be overwritten
+    [RESUME]() {
+        if (this[DESTROYED])
+            return;
+        if (!this[DATALISTENERS] && !this[PIPES].length) {
+            this[DISCARDED] = true;
+        }
+        this[PAUSED] = false;
+        this[FLOWING] = true;
+        this.emit('resume');
+        if (this[BUFFER].length)
+            this[FLUSH]();
+        else if (this[EOF])
+            this[MAYBE_EMIT_END]();
+        else
+            this.emit('drain');
+    }
+    /**
+     * Resume the stream if it is currently in a paused state
+     *
+     * If called when there are no pipe destinations or `data` event listeners,
+     * this will place the stream in a "discarded" state, where all data will
+     * be thrown away. The discarded state is removed if a pipe destination or
+     * data handler is added, if pause() is called, or if any synchronous or
+     * asynchronous iteration is started.
+     */
+    resume() {
+        return this[RESUME]();
+    }
+    /**
+     * Pause the stream
+     */
+    pause() {
+        this[FLOWING] = false;
+        this[PAUSED] = true;
+        this[DISCARDED] = false;
+    }
+    /**
+     * true if the stream has been forcibly destroyed
+     */
+    get destroyed() {
+        return this[DESTROYED];
+    }
+    /**
+     * true if the stream is currently in a flowing state, meaning that
+     * any writes will be immediately emitted.
+     */
+    get flowing() {
+        return this[FLOWING];
+    }
+    /**
+     * true if the stream is currently in a paused state
+     */
+    get paused() {
+        return this[PAUSED];
+    }
+    [BUFFERPUSH](chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] += 1;
+        else
+            this[BUFFERLENGTH] += chunk.length;
+        this[BUFFER].push(chunk);
+    }
+    [BUFFERSHIFT]() {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] -= 1;
+        else
+            this[BUFFERLENGTH] -= this[BUFFER][0].length;
+        return this[BUFFER].shift();
+    }
+    [FLUSH](noDrain = false) {
+        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
+            this[BUFFER].length);
+        if (!noDrain && !this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+    }
+    [FLUSHCHUNK](chunk) {
+        this.emit('data', chunk);
+        return this[FLOWING];
+    }
+    /**
+     * Pipe all data emitted by this stream into the destination provided.
+     *
+     * Triggers the flow of data.
+     */
+    pipe(dest, opts) {
+        if (this[DESTROYED])
+            return dest;
+        this[DISCARDED] = false;
+        const ended = this[EMITTED_END];
+        opts = opts || {};
+        if (dest === proc.stdout || dest === proc.stderr)
+            opts.end = false;
+        else
+            opts.end = opts.end !== false;
+        opts.proxyErrors = !!opts.proxyErrors;
+        // piping an ended stream ends immediately
+        if (ended) {
+            if (opts.end)
+                dest.end();
+        }
+        else {
+            // "as" here just ignores the WType, which pipes don't care about,
+            // since they're only consuming from us, and writing to the dest
+            this[PIPES].push(!opts.proxyErrors
+                ? new Pipe(this, dest, opts)
+                : new PipeProxyErrors(this, dest, opts));
+            if (this[ASYNC])
+                defer(() => this[RESUME]());
+            else
+                this[RESUME]();
+        }
+        return dest;
+    }
+    /**
+     * Fully unhook a piped destination stream.
+     *
+     * If the destination stream was the only consumer of this stream (ie,
+     * there are no other piped destinations or `'data'` event listeners)
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    unpipe(dest) {
+        const p = this[PIPES].find(p => p.dest === dest);
+        if (p) {
+            if (this[PIPES].length === 1) {
+                if (this[FLOWING] && this[DATALISTENERS] === 0) {
+                    this[FLOWING] = false;
+                }
+                this[PIPES] = [];
+            }
+            else
+                this[PIPES].splice(this[PIPES].indexOf(p), 1);
+            p.unpipe();
+        }
+    }
+    /**
+     * Alias for {@link Minipass#on}
+     */
+    addListener(ev, handler) {
+        return this.on(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.on`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * - Adding a 'data' event handler will trigger the flow of data
+     *
+     * - Adding a 'readable' event handler when there is data waiting to be read
+     *   will cause 'readable' to be emitted immediately.
+     *
+     * - Adding an 'endish' event handler ('end', 'finish', etc.) which has
+     *   already passed will cause the event to be emitted immediately and all
+     *   handlers removed.
+     *
+     * - Adding an 'error' event handler after an error has been emitted will
+     *   cause the event to be re-emitted immediately with the error previously
+     *   raised.
+     */
+    on(ev, handler) {
+        const ret = super.on(ev, handler);
+        if (ev === 'data') {
+            this[DISCARDED] = false;
+            this[DATALISTENERS]++;
+            if (!this[PIPES].length && !this[FLOWING]) {
+                this[RESUME]();
+            }
+        }
+        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
+            super.emit('readable');
+        }
+        else if (isEndish(ev) && this[EMITTED_END]) {
+            super.emit(ev);
+            this.removeAllListeners(ev);
+        }
+        else if (ev === 'error' && this[EMITTED_ERROR]) {
+            const h = handler;
+            if (this[ASYNC])
+                defer(() => h.call(this, this[EMITTED_ERROR]));
+            else
+                h.call(this, this[EMITTED_ERROR]);
+        }
+        return ret;
+    }
+    /**
+     * Alias for {@link Minipass#off}
+     */
+    removeListener(ev, handler) {
+        return this.off(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.off`
+     *
+     * If a 'data' event handler is removed, and it was the last consumer
+     * (ie, there are no pipe destinations or other 'data' event listeners),
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    off(ev, handler) {
+        const ret = super.off(ev, handler);
+        // if we previously had listeners, and now we don't, and we don't
+        // have any pipes, then stop the flow, unless it's been explicitly
+        // put in a discarded flowing state via stream.resume().
+        if (ev === 'data') {
+            this[DATALISTENERS] = this.listeners('data').length;
+            if (this[DATALISTENERS] === 0 &&
+                !this[DISCARDED] &&
+                !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * Mostly identical to `EventEmitter.removeAllListeners`
+     *
+     * If all 'data' event handlers are removed, and they were the last consumer
+     * (ie, there are no pipe destinations), then the flow of data will stop
+     * until there is another consumer or {@link Minipass#resume} is explicitly
+     * called.
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ? super.emit('error', data)
+                : false;
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'resume') {
+            const ret = super.emit('resume');
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'finish' || ev === 'prefinish') {
+            const ret = super.emit(ev);
+            this.removeAllListeners(ev);
+            return ret;
+        }
+        // Some other unknown event
+        const ret = super.emit(ev, ...args);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITDATA](data) {
+        for (const p of this[PIPES]) {
+            if (p.dest.write(data) === false)
+                this.pause();
+        }
+        const ret = this[DISCARDED] ? false : super.emit('data', data);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITEND]() {
+        if (this[EMITTED_END])
+            return false;
+        this[EMITTED_END] = true;
+        this.readable = false;
+        return this[ASYNC]
+            ? (defer(() => this[EMITEND2]()), true)
+            : this[EMITEND2]();
+    }
+    [EMITEND2]() {
+        if (this[DECODER]) {
+            const data = this[DECODER].end();
+            if (data) {
+                for (const p of this[PIPES]) {
+                    p.dest.write(data);
+                }
+                if (!this[DISCARDED])
+                    super.emit('data', data);
+            }
+        }
+        for (const p of this[PIPES]) {
+            p.end();
+        }
+        const ret = super.emit('end');
+        this.removeAllListeners('end');
+        return ret;
+    }
+    /**
+     * Return a Promise that resolves to an array of all emitted data once
+     * the stream ends.
+     */
+    async collect() {
+        const buf = Object.assign([], {
+            dataLength: 0,
+        });
+        if (!this[OBJECTMODE])
+            buf.dataLength = 0;
+        // set the promise first, in case an error is raised
+        // by triggering the flow here.
+        const p = this.promise();
+        this.on('data', c => {
+            buf.push(c);
+            if (!this[OBJECTMODE])
+                buf.dataLength += c.length;
+        });
+        await p;
+        return buf;
+    }
+    /**
+     * Return a Promise that resolves to the concatenation of all emitted data
+     * once the stream ends.
+     *
+     * Not allowed on objectMode streams.
+     */
+    async concat() {
+        if (this[OBJECTMODE]) {
+            throw new Error('cannot concat in objectMode');
+        }
+        const buf = await this.collect();
+        return (this[ENCODING]
+            ? buf.join('')
+            : Buffer.concat(buf, buf.dataLength));
+    }
+    /**
+     * Return a void Promise that resolves once the stream ends.
+     */
+    async promise() {
+        return new Promise((resolve, reject) => {
+            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
+            this.on('error', er => reject(er));
+            this.on('end', () => resolve());
+        });
+    }
+    /**
+     * Asynchronous `for await of` iteration.
+     *
+     * This will continue emitting all chunks until the stream terminates.
+     */
+    [Symbol.asyncIterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = async () => {
+            this.pause();
+            stopped = true;
+            return { value: undefined, done: true };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const res = this.read();
+            if (res !== null)
+                return Promise.resolve({ done: false, value: res });
+            if (this[EOF])
+                return stop();
+            let resolve;
+            let reject;
+            const onerr = (er) => {
+                this.off('data', ondata);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                reject(er);
+            };
+            const ondata = (value) => {
+                this.off('error', onerr);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                this.pause();
+                resolve({ value, done: !!this[EOF] });
+            };
+            const onend = () => {
+                this.off('error', onerr);
+                this.off('data', ondata);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                resolve({ done: true, value: undefined });
+            };
+            const ondestroy = () => onerr(new Error('stream destroyed'));
+            return new Promise((res, rej) => {
+                reject = rej;
+                resolve = res;
+                this.once(DESTROYED, ondestroy);
+                this.once('error', onerr);
+                this.once('end', onend);
+                this.once('data', ondata);
+            });
+        };
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Synchronous `for of` iteration.
+     *
+     * The iteration will terminate when the internal buffer runs out, even
+     * if the stream has not yet terminated.
+     */
+    [Symbol.iterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = () => {
+            this.pause();
+            this.off(ERROR, stop);
+            this.off(DESTROYED, stop);
+            this.off('end', stop);
+            stopped = true;
+            return { done: true, value: undefined };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const value = this.read();
+            return value === null ? stop() : { done: false, value };
+        };
+        this.once('end', stop);
+        this.once(ERROR, stop);
+        this.once(DESTROYED, stop);
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.iterator]() {
+                return this;
+            },
+        };
+    }
+    /**
+     * Destroy a stream, preventing it from being used for any further purpose.
+     *
+     * If the stream has a `close()` method, then it will be called on
+     * destruction.
+     *
+     * After destruction, any attempt to write data, read data, or emit most
+     * events will be ignored.
+     *
+     * If an error argument is provided, then it will be emitted in an
+     * 'error' event.
+     */
+    destroy(er) {
+        if (this[DESTROYED]) {
+            if (er)
+                this.emit('error', er);
+            else
+                this.emit(DESTROYED);
+            return this;
+        }
+        this[DESTROYED] = true;
+        this[DISCARDED] = true;
+        // throw away all buffered data, it's never coming out
+        this[BUFFER].length = 0;
+        this[BUFFERLENGTH] = 0;
+        const wc = this;
+        if (typeof wc.close === 'function' && !this[CLOSED])
+            wc.close();
+        if (er)
+            this.emit('error', er);
+        // if no error to emit, still reject pending promises
+        else
+            this.emit(DESTROYED);
+        return this;
+    }
+    /**
+     * Alias for {@link isStream}
+     *
+     * Former export location, maintained for backwards compatibility.
+     *
+     * @deprecated
+     */
+    static get isStream() {
+        return isStream;
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/package.json b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/sigstore/node_modules/cacache/node_modules/minipass/package.json b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/package.json
new file mode 100644
index 0000000000000..6faaa247a5bc6
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/node_modules/minipass/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "minipass",
+  "version": "7.0.3",
+  "description": "minimal implementation of a PassThrough stream",
+  "main": "./dist/cjs/index.js",
+  "module": "./dist/mjs/index.js",
+  "types": "./dist/cjs/index.js",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.js"
+      }
+    },
+    "./package.json": "./package.json"
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "preprepare": "rm -rf dist",
+    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "c8 tap",
+    "snap": "c8 tap",
+    "format": "prettier --write . --loglevel warn",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "tap": {
+    "coverage": false,
+    "node-arg": [
+      "--enable-source-maps",
+      "--no-warnings",
+      "--loader",
+      "ts-node/esm"
+    ],
+    "ts": false
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "devDependencies": {
+    "@types/node": "^20.1.2",
+    "@types/tap": "^15.0.8",
+    "c8": "^7.13.0",
+    "prettier": "^2.6.2",
+    "tap": "^16.3.0",
+    "ts-node": "^10.9.1",
+    "typedoc": "^0.24.8",
+    "typescript": "^5.1.3",
+    "end-of-stream": "^1.4.0",
+    "node-abort-controller": "^3.1.1",
+    "sync-content": "^1.0.2",
+    "through2": "^2.0.3"
+  },
+  "repository": "https://github.com/isaacs/minipass",
+  "keywords": [
+    "passthrough",
+    "stream"
+  ],
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC",
+  "engines": {
+    "node": ">=16 || 14 >=14.17"
+  }
+}
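The conditional `exports` map above hands `require()` the CommonJS build
and `import` the ESM build, each paired with its own `.d.ts` types. A
sketch of the two consumer entry points (file names are hypothetical):

    // consumer.cjs — resolves to ./dist/cjs/index.js
    const { Minipass } = require('minipass')

    // consumer.mjs — would resolve to ./dist/mjs/index.js instead:
    // import { Minipass } from 'minipass'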
diff --git a/node_modules/sigstore/node_modules/cacache/package.json b/node_modules/sigstore/node_modules/cacache/package.json
new file mode 100644
index 0000000000000..ab58cb8b7c50f
--- /dev/null
+++ b/node_modules/sigstore/node_modules/cacache/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "cacache",
+  "version": "17.1.4",
+  "cache-version": {
+    "content": "2",
+    "index": "5"
+  },
+  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "coverage": "tap",
+    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
+    "lint": "eslint \"**/*.js\"",
+    "npmclilint": "npmcli-lint",
+    "lintfix": "npm run lint -- --fix",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "posttest": "npm run lint",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/cacache.git"
+  },
+  "keywords": [
+    "cache",
+    "caching",
+    "content-addressable",
+    "sri",
+    "sri hash",
+    "subresource integrity",
+    "cache",
+    "storage",
+    "store",
+    "file store",
+    "filesystem",
+    "disk cache",
+    "disk storage"
+  ],
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/fs": "^3.1.0",
+    "fs-minipass": "^3.0.0",
+    "glob": "^10.2.2",
+    "lru-cache": "^7.7.1",
+    "minipass": "^7.0.3",
+    "minipass-collect": "^1.0.2",
+    "minipass-flush": "^1.0.5",
+    "minipass-pipeline": "^1.2.4",
+    "p-map": "^4.0.0",
+    "ssri": "^10.0.0",
+    "tar": "^6.1.11",
+    "unique-filename": "^3.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "tap": "^16.0.0"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "windowsCI": false,
+    "version": "4.18.0",
+    "publish": "true"
+  },
+  "author": "GitHub Inc.",
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/node_modules/tuf-js/node_modules/cacache/LICENSE.md b/node_modules/tuf-js/node_modules/cacache/LICENSE.md
new file mode 100644
index 0000000000000..8d28acf866d93
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/LICENSE.md
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/path.js b/node_modules/tuf-js/node_modules/cacache/lib/content/path.js
new file mode 100644
index 0000000000000..ad5a76a4f73f2
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/content/path.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const contentVer = require('../../package.json')['cache-version'].content
+const hashToSegments = require('../util/hash-to-segments')
+const path = require('path')
+const ssri = require('ssri')
+
+// Current format of content file path:
+//
+// sha512-BaSE64Hex= ->
+// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
+//
+module.exports = contentPath
+
+function contentPath (cache, integrity) {
+  const sri = ssri.parse(integrity, { single: true })
+  // contentPath is the *strongest* algo given
+  return path.join(
+    contentDir(cache),
+    sri.algorithm,
+    ...hashToSegments(sri.hexDigest())
+  )
+}
+
+module.exports.contentDir = contentDir
+
+function contentDir (cache) {
+  return path.join(cache, `content-v${contentVer}`)
+}
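Putting the path-format comment above into practice: an integrity string
parses to an algorithm plus a hex digest, and the first two 2-character
segments of the digest become intermediate directories. A minimal sketch
(cache directory and content are hypothetical; the deep require works
because cacache's package.json defines no exports map):

    'use strict'
    const ssri = require('ssri')
    const contentPath = require('cacache/lib/content/path')

    // derive a real integrity value just for illustration
    const integrity = ssri.fromData('hello, cache').toString()
    console.log(contentPath('/tmp/my-cache', integrity))
    // -> /tmp/my-cache/content-v2/sha512/<2 hex>/<2 hex>/<rest of digest>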
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/read.js b/node_modules/tuf-js/node_modules/cacache/lib/content/read.js
new file mode 100644
index 0000000000000..f41b539df65dc
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/content/read.js
@@ -0,0 +1,166 @@
+'use strict'
+
+const fs = require('fs/promises')
+const fsm = require('fs-minipass')
+const ssri = require('ssri')
+const contentPath = require('./path')
+const Pipeline = require('minipass-pipeline')
+
+module.exports = read
+
+const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
+async function read (cache, integrity, opts = {}) {
+  const { size } = opts
+  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+    // get size
+    const stat = await fs.stat(cpath)
+    return { stat, cpath, sri }
+  })
+  if (typeof size === 'number' && stat.size !== size) {
+    throw sizeError(size, stat.size)
+  }
+
+  if (stat.size > MAX_SINGLE_READ_SIZE) {
+    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
+  }
+
+  const data = await fs.readFile(cpath, { encoding: null })
+  if (!ssri.checkData(data, sri)) {
+    throw integrityError(sri, cpath)
+  }
+
+  return data
+}
+
+const readPipeline = (cpath, size, sri, stream) => {
+  stream.push(
+    new fsm.ReadStream(cpath, {
+      size,
+      readSize: MAX_SINGLE_READ_SIZE,
+    }),
+    ssri.integrityStream({
+      integrity: sri,
+      size,
+    })
+  )
+  return stream
+}
+
+module.exports.stream = readStream
+module.exports.readStream = readStream
+
+function readStream (cache, integrity, opts = {}) {
+  const { size } = opts
+  const stream = new Pipeline()
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+      // just stat to ensure it exists
+      const stat = await fs.stat(cpath)
+      return { stat, cpath, sri }
+    })
+    if (typeof size === 'number' && size !== stat.size) {
+      return stream.emit('error', sizeError(size, stat.size))
+    }
+
+    return readPipeline(cpath, stat.size, sri, stream)
+  }).catch(err => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.copy = copy
+
+function copy (cache, integrity, dest) {
+  return withContentSri(cache, integrity, (cpath, sri) => {
+    return fs.copyFile(cpath, dest)
+  })
+}
+
+module.exports.hasContent = hasContent
+
+async function hasContent (cache, integrity) {
+  if (!integrity) {
+    return false
+  }
+
+  try {
+    return await withContentSri(cache, integrity, async (cpath, sri) => {
+      const stat = await fs.stat(cpath)
+      return { size: stat.size, sri, stat }
+    })
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return false
+    }
+
+    if (err.code === 'EPERM') {
+      /* istanbul ignore else */
+      if (process.platform !== 'win32') {
+        throw err
+      } else {
+        return false
+      }
+    }
+  }
+}
+
+async function withContentSri (cache, integrity, fn) {
+  const sri = ssri.parse(integrity)
+  // If `integrity` has multiple entries, pick the first digest
+  // with available local data.
+  const algo = sri.pickAlgorithm()
+  const digests = sri[algo]
+
+  if (digests.length <= 1) {
+    const cpath = contentPath(cache, digests[0])
+    return fn(cpath, digests[0])
+  } else {
+    // Can't use race here because a generic error can happen before
+    // an ENOENT error, and can happen before a valid result
+    const results = await Promise.all(digests.map(async (meta) => {
+      try {
+        return await withContentSri(cache, meta, fn)
+      } catch (err) {
+        if (err.code === 'ENOENT') {
+          return Object.assign(
+            new Error('No matching content found for ' + sri.toString()),
+            { code: 'ENOENT' }
+          )
+        }
+        return err
+      }
+    }))
+    // Return the first non-error result if one is found
+    const result = results.find((r) => !(r instanceof Error))
+    if (result) {
+      return result
+    }
+
+    // Throw the No matching content found error
+    const enoentError = results.find((r) => r.code === 'ENOENT')
+    if (enoentError) {
+      throw enoentError
+    }
+
+    // Throw generic error
+    throw results.find((r) => r instanceof Error)
+  }
+}
+
+function sizeError (expected, found) {
+  /* eslint-disable-next-line max-len */
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function integrityError (sri, path) {
+  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
+  err.code = 'EINTEGRITY'
+  err.sri = sri
+  err.path = path
+  return err
+}
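A minimal sketch of the read API above (cache path and content are
hypothetical; on a fresh cache, hasContent() simply resolves false):

    'use strict'
    const ssri = require('ssri')
    const read = require('cacache/lib/content/read')

    async function main () {
      const cache = '/tmp/my-cache'
      const integrity = ssri.fromData('hello, cache').toString()

      // hasContent() resolves false on ENOENT instead of rejecting
      if (await read.hasContent(cache, integrity)) {
        // read() re-hashes the file and rejects with EINTEGRITY on a
        // mismatch, or EBADSIZE when opts.size disagrees with the file
        const data = await read(cache, integrity)
        console.log(data.length)
      }
    }
    main().catch(console.error)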
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js b/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js
new file mode 100644
index 0000000000000..ce58d679e4cb2
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js
@@ -0,0 +1,18 @@
+'use strict'
+
+const fs = require('fs/promises')
+const contentPath = require('./path')
+const { hasContent } = require('./read')
+
+module.exports = rm
+
+async function rm (cache, integrity) {
+  const content = await hasContent(cache, integrity)
+  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
+  if (content && content.sri) {
+    await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
+    return true
+  } else {
+    return false
+  }
+}
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/content/write.js b/node_modules/tuf-js/node_modules/cacache/lib/content/write.js
new file mode 100644
index 0000000000000..7146146581287
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/content/write.js
@@ -0,0 +1,205 @@
+'use strict'
+
+const events = require('events')
+
+const contentPath = require('./path')
+const fs = require('fs/promises')
+const { moveFile } = require('@npmcli/fs')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+const Flush = require('minipass-flush')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+const fsm = require('fs-minipass')
+
+module.exports = write
+
+// Cache of move operations in progress, so we don't duplicate them
+const moveOperations = new Map()
+
+async function write (cache, data, opts = {}) {
+  const { algorithms, size, integrity } = opts
+
+  if (typeof size === 'number' && data.length !== size) {
+    throw sizeError(size, data.length)
+  }
+
+  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
+  if (integrity && !ssri.checkData(data, integrity, opts)) {
+    throw checksumError(integrity, sri)
+  }
+
+  for (const algo in sri) {
+    const tmp = await makeTmp(cache, opts)
+    const hash = sri[algo].toString()
+    try {
+      await fs.writeFile(tmp.target, data, { flag: 'wx' })
+      await moveToDestination(tmp, cache, hash, opts)
+    } finally {
+      if (!tmp.moved) {
+        await fs.rm(tmp.target, { recursive: true, force: true })
+      }
+    }
+  }
+  return { integrity: sri, size: data.length }
+}
+
+module.exports.stream = writeStream
+
+// Writes are proxied to the 'inputStream', which is handed to the
+// content-handling Promise; 'end' is deferred until the content is handled.
+class CacacheWriteStream extends Flush {
+  constructor (cache, opts) {
+    super()
+    this.opts = opts
+    this.cache = cache
+    this.inputStream = new Minipass()
+    this.inputStream.on('error', er => this.emit('error', er))
+    this.inputStream.on('drain', () => this.emit('drain'))
+    this.handleContentP = null
+  }
+
+  write (chunk, encoding, cb) {
+    if (!this.handleContentP) {
+      this.handleContentP = handleContent(
+        this.inputStream,
+        this.cache,
+        this.opts
+      )
+    }
+    return this.inputStream.write(chunk, encoding, cb)
+  }
+
+  flush (cb) {
+    this.inputStream.end(() => {
+      if (!this.handleContentP) {
+        const e = new Error('Cache input stream was empty')
+        e.code = 'ENODATA'
+        // empty streams are probably emitting end right away.
+        // defer this one tick by rejecting a promise on it.
+        return Promise.reject(e).catch(cb)
+      }
+      // eslint-disable-next-line promise/catch-or-return
+      this.handleContentP.then(
+        (res) => {
+          res.integrity && this.emit('integrity', res.integrity)
+          // eslint-disable-next-line promise/always-return
+          res.size !== null && this.emit('size', res.size)
+          cb()
+        },
+        (er) => cb(er)
+      )
+    })
+  }
+}
+
+function writeStream (cache, opts = {}) {
+  return new CacacheWriteStream(cache, opts)
+}
+
+async function handleContent (inputStream, cache, opts) {
+  const tmp = await makeTmp(cache, opts)
+  try {
+    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
+    await moveToDestination(
+      tmp,
+      cache,
+      res.integrity,
+      opts
+    )
+    return res
+  } finally {
+    if (!tmp.moved) {
+      await fs.rm(tmp.target, { recursive: true, force: true })
+    }
+  }
+}
+
+async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
+  const outStream = new fsm.WriteStream(tmpTarget, {
+    flags: 'wx',
+  })
+
+  if (opts.integrityEmitter) {
+    // we need to create these all simultaneously since they can fire in any order
+    const [integrity, size] = await Promise.all([
+      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
+      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
+      new Pipeline(inputStream, outStream).promise(),
+    ])
+    return { integrity, size }
+  }
+
+  let integrity
+  let size
+  const hashStream = ssri.integrityStream({
+    integrity: opts.integrity,
+    algorithms: opts.algorithms,
+    size: opts.size,
+  })
+  hashStream.on('integrity', i => {
+    integrity = i
+  })
+  hashStream.on('size', s => {
+    size = s
+  })
+
+  const pipeline = new Pipeline(inputStream, hashStream, outStream)
+  await pipeline.promise()
+  return { integrity, size }
+}
+
+async function makeTmp (cache, opts) {
+  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+  await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
+  return {
+    target: tmpTarget,
+    moved: false,
+  }
+}
+
+async function moveToDestination (tmp, cache, sri, opts) {
+  const destination = contentPath(cache, sri)
+  const destDir = path.dirname(destination)
+  if (moveOperations.has(destination)) {
+    return moveOperations.get(destination)
+  }
+  moveOperations.set(
+    destination,
+    fs.mkdir(destDir, { recursive: true })
+      .then(async () => {
+        await moveFile(tmp.target, destination, { overwrite: false })
+        tmp.moved = true
+        return tmp.moved
+      })
+      .catch(err => {
+        // a "destination file exists" error means this content is already
+        // cached; swallow it, and rethrow any other failure untouched
+        if (!err.message.startsWith('The destination file exists')) {
+          throw err
+        }
+      }).finally(() => {
+        moveOperations.delete(destination)
+      })
+
+  )
+  return moveOperations.get(destination)
+}
+
+function sizeError (expected, found) {
+  /* eslint-disable-next-line max-len */
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function checksumError (expected, found) {
+  const err = new Error(`Integrity check failed:
+  Wanted: ${expected}
+   Found: ${found}`)
+  err.code = 'EINTEGRITY'
+  err.expected = expected
+  err.found = found
+  return err
+}
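A sketch of the two write entry points above (cache path and data are
hypothetical). The buffered form hashes and moves the content in one call;
the stream form emits 'integrity' and 'size' once the content has landed:

    'use strict'
    const write = require('cacache/lib/content/write')

    async function main () {
      const cache = '/tmp/my-cache'

      // buffer mode: hash, write to tmp, then move into content-v2/
      const { integrity, size } = await write(cache, Buffer.from('hello'))
      console.log(integrity.toString(), size)

      // stream mode
      const ws = write.stream(cache, { algorithms: ['sha512'] })
      ws.on('integrity', i => console.log('stored as', i.toString()))
      ws.on('error', console.error)
      ws.end('more data')
    }
    main().catch(console.error)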
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js b/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js
new file mode 100644
index 0000000000000..722a37af5ce15
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js
@@ -0,0 +1,330 @@
+'use strict'
+
+const crypto = require('crypto')
+const {
+  appendFile,
+  mkdir,
+  readFile,
+  readdir,
+  rm,
+  writeFile,
+} = require('fs/promises')
+const { Minipass } = require('minipass')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+
+const contentPath = require('./content/path')
+const hashToSegments = require('./util/hash-to-segments')
+const indexV = require('../package.json')['cache-version'].index
+const { moveFile } = require('@npmcli/fs')
+
+module.exports.NotFoundError = class NotFoundError extends Error {
+  constructor (cache, key) {
+    super(`No cache entry for ${key} found in ${cache}`)
+    this.code = 'ENOENT'
+    this.cache = cache
+    this.key = key
+  }
+}
+
+module.exports.compact = compact
+
+async function compact (cache, key, matchFn, opts = {}) {
+  const bucket = bucketPath(cache, key)
+  const entries = await bucketEntries(bucket)
+  const newEntries = []
+  // we loop backwards because the bottom-most result is the newest
+  // since we add new entries with appendFile
+  for (let i = entries.length - 1; i >= 0; --i) {
+    const entry = entries[i]
+    // a null integrity could mean either a delete was appended
+    // or the user has simply stored an index that does not map
+    // to any content. we determine if the user wants to keep the
+    // null integrity based on the validateEntry function passed in options.
+    // if the integrity is null and no validateEntry is provided, we break
+    // as we consider the null integrity to be a deletion of everything
+    // that came before it.
+    if (entry.integrity === null && !opts.validateEntry) {
+      break
+    }
+
+    // if this entry is valid, and it is either the first entry or
+    // the newEntries array doesn't already include an entry that
+    // matches this one based on the provided matchFn, then we add
+    // it to the beginning of our list
+    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
+      (newEntries.length === 0 ||
+        !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
+      newEntries.unshift(entry)
+    }
+  }
+
+  const newIndex = '\n' + newEntries.map((entry) => {
+    const stringified = JSON.stringify(entry)
+    const hash = hashEntry(stringified)
+    return `${hash}\t${stringified}`
+  }).join('\n')
+
+  const setup = async () => {
+    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+    await mkdir(path.dirname(target), { recursive: true })
+    return {
+      target,
+      moved: false,
+    }
+  }
+
+  const teardown = async (tmp) => {
+    if (!tmp.moved) {
+      return rm(tmp.target, { recursive: true, force: true })
+    }
+  }
+
+  const write = async (tmp) => {
+    await writeFile(tmp.target, newIndex, { flag: 'wx' })
+    await mkdir(path.dirname(bucket), { recursive: true })
+    // we use moveFile() from @npmcli/fs directly here because we
+    // want to overwrite the existing file
+    await moveFile(tmp.target, bucket)
+    tmp.moved = true
+  }
+
+  // write the file atomically
+  const tmp = await setup()
+  try {
+    await write(tmp)
+  } finally {
+    await teardown(tmp)
+  }
+
+  // we reverse the list we generated so that the newest
+  // entries come first, which makes them easier to loop through.
+  // the `true` passed to formatEntry tells it to keep null
+  // integrity values; if they made it this far, it's because
+  // validateEntry returned true, so we should return them
+  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
+}
+
+module.exports.insert = insert
+
+async function insert (cache, key, integrity, opts = {}) {
+  const { metadata, size, time } = opts
+  const bucket = bucketPath(cache, key)
+  const entry = {
+    key,
+    integrity: integrity && ssri.stringify(integrity),
+    time: time || Date.now(),
+    size,
+    metadata,
+  }
+  try {
+    await mkdir(path.dirname(bucket), { recursive: true })
+    const stringified = JSON.stringify(entry)
+    // NOTE - Cleverness ahoy!
+    //
+    // This works because it's tremendously unlikely for an entry to corrupt
+    // another while still producing a matching checksum for the JSON in
+    // question. So, we just slap a hash of each line in there and verify it
+    // on read.
+    //
+    // Thanks to @isaacs for the whiteboarding session that ended up with
+    // this.
+    await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return undefined
+    }
+
+    throw err
+  }
+  return formatEntry(cache, entry)
+}
+
+module.exports.find = find
+
+async function find (cache, key) {
+  const bucket = bucketPath(cache, key)
+  try {
+    const entries = await bucketEntries(bucket)
+    return entries.reduce((latest, next) => {
+      if (next && next.key === key) {
+        return formatEntry(cache, next)
+      } else {
+        return latest
+      }
+    }, null)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return null
+    } else {
+      throw err
+    }
+  }
+}
+
+module.exports.delete = del
+
+function del (cache, key, opts = {}) {
+  if (!opts.removeFully) {
+    return insert(cache, key, null, opts)
+  }
+
+  const bucket = bucketPath(cache, key)
+  return rm(bucket, { recursive: true, force: true })
+}
+
+module.exports.lsStream = lsStream
+
+function lsStream (cache) {
+  const indexDir = bucketDir(cache)
+  const stream = new Minipass({ objectMode: true })
+
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const buckets = await readdirOrEmpty(indexDir)
+    await Promise.all(buckets.map(async (bucket) => {
+      const bucketPath = path.join(indexDir, bucket)
+      const subbuckets = await readdirOrEmpty(bucketPath)
+      await Promise.all(subbuckets.map(async (subbucket) => {
+        const subbucketPath = path.join(bucketPath, subbucket)
+
+        // "/cachename//./*"
+        const subbucketEntries = await readdirOrEmpty(subbucketPath)
+        await Promise.all(subbucketEntries.map(async (entry) => {
+          const entryPath = path.join(subbucketPath, entry)
+          try {
+            const entries = await bucketEntries(entryPath)
+            // using a Map here deduplicates keys: later lines in the
+            // append-only bucket override earlier entries for the same key
+            const reduced = entries.reduce((acc, entry) => {
+              acc.set(entry.key, entry)
+              return acc
+            }, new Map())
+            // reduced is a map of key => entry
+            for (const entry of reduced.values()) {
+              const formatted = formatEntry(cache, entry)
+              if (formatted) {
+                stream.write(formatted)
+              }
+            }
+          } catch (err) {
+            if (err.code === 'ENOENT') {
+              return undefined
+            }
+            throw err
+          }
+        }))
+      }))
+    }))
+    stream.end()
+    return stream
+  }).catch(err => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.ls = ls
+
+async function ls (cache) {
+  const entries = await lsStream(cache).collect()
+  return entries.reduce((acc, xs) => {
+    acc[xs.key] = xs
+    return acc
+  }, {})
+}
+
+module.exports.bucketEntries = bucketEntries
+
+async function bucketEntries (bucket, filter) {
+  const data = await readFile(bucket, 'utf8')
+  return _bucketEntries(data, filter)
+}
+
+function _bucketEntries (data, filter) {
+  const entries = []
+  data.split('\n').forEach((entry) => {
+    if (!entry) {
+      return
+    }
+
+    const pieces = entry.split('\t')
+    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
+      // Hash is no good! Corruption or malice? Doesn't matter!
+      // EJECT EJECT
+      return
+    }
+    let obj
+    try {
+      obj = JSON.parse(pieces[1])
+    } catch (_) {
+      // ignore JSON parse errors; entries that fail to parse are skipped
+    }
+    // coverage disabled here, no need to test with an entry that parses to something falsey
+    // istanbul ignore else
+    if (obj) {
+      entries.push(obj)
+    }
+  })
+  return entries
+}
+
+module.exports.bucketDir = bucketDir
+
+function bucketDir (cache) {
+  return path.join(cache, `index-v${indexV}`)
+}
+
+module.exports.bucketPath = bucketPath
+
+function bucketPath (cache, key) {
+  const hashed = hashKey(key)
+  return path.join.apply(
+    path,
+    [bucketDir(cache)].concat(hashToSegments(hashed))
+  )
+}
+
+module.exports.hashKey = hashKey
+
+function hashKey (key) {
+  return hash(key, 'sha256')
+}
+
+module.exports.hashEntry = hashEntry
+
+function hashEntry (str) {
+  return hash(str, 'sha1')
+}
+
+function hash (str, digest) {
+  return crypto
+    .createHash(digest)
+    .update(str)
+    .digest('hex')
+}
+
+function formatEntry (cache, entry, keepAll) {
+  // Treat null digests as deletions. They'll shadow any previous entries.
+  if (!entry.integrity && !keepAll) {
+    return null
+  }
+
+  return {
+    key: entry.key,
+    integrity: entry.integrity,
+    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
+    size: entry.size,
+    time: entry.time,
+    metadata: entry.metadata,
+  }
+}
+
+function readdirOrEmpty (dir) {
+  return readdir(dir).catch((err) => {
+    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
+      return []
+    }
+
+    throw err
+  })
+}
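Each bucket file is an append-only log of "<sha1 of JSON>\t<JSON>" lines,
so a torn or corrupted line invalidates only itself. A small round-trip
sketch (cache path, key, and metadata are hypothetical):

    'use strict'
    const ssri = require('ssri')
    const index = require('cacache/lib/entry-index')

    async function main () {
      const cache = '/tmp/my-cache'
      const integrity = ssri.fromData('some content').toString()

      // appends "\n<sha1 of the JSON>\t<JSON>" to the hashed bucket file
      await index.insert(cache, 'my-key', integrity, { metadata: { a: 1 } })

      // scans the bucket and returns the newest valid entry for the key
      const entry = await index.find(cache, 'my-key')
      console.log(entry.path, entry.metadata)
    }
    main().catch(console.error)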
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/get.js b/node_modules/tuf-js/node_modules/cacache/lib/get.js
new file mode 100644
index 0000000000000..80ec206c7ecaa
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/get.js
@@ -0,0 +1,170 @@
+'use strict'
+
+const Collect = require('minipass-collect')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const read = require('./content/read')
+
+async function getData (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return {
+      metadata: memoized.entry.metadata,
+      data: memoized.data,
+      integrity: memoized.entry.integrity,
+      size: memoized.entry.size,
+    }
+  }
+
+  const entry = await index.find(cache, key, opts)
+  if (!entry) {
+    throw new index.NotFoundError(cache, key)
+  }
+  const data = await read(cache, entry.integrity, { integrity, size })
+  if (memoize) {
+    memo.put(cache, entry, data, opts)
+  }
+
+  return {
+    data,
+    metadata: entry.metadata,
+    size: entry.size,
+    integrity: entry.integrity,
+  }
+}
+module.exports = getData
+
+async function getDataByDigest (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get.byDigest(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return memoized
+  }
+
+  const res = await read(cache, key, { integrity, size })
+  if (memoize) {
+    memo.put.byDigest(cache, key, res, opts)
+  }
+  return res
+}
+module.exports.byDigest = getDataByDigest
+
+const getMemoizedStream = (memoized) => {
+  const stream = new Minipass()
+  stream.on('newListener', function (ev, cb) {
+    ev === 'metadata' && cb(memoized.entry.metadata)
+    ev === 'integrity' && cb(memoized.entry.integrity)
+    ev === 'size' && cb(memoized.entry.size)
+  })
+  stream.end(memoized.data)
+  return stream
+}
+
+function getStream (cache, key, opts = {}) {
+  const { memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return getMemoizedStream(memoized)
+  }
+
+  const stream = new Pipeline()
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const entry = await index.find(cache, key)
+    if (!entry) {
+      throw new index.NotFoundError(cache, key)
+    }
+
+    stream.emit('metadata', entry.metadata)
+    stream.emit('integrity', entry.integrity)
+    stream.emit('size', entry.size)
+    stream.on('newListener', function (ev, cb) {
+      ev === 'metadata' && cb(entry.metadata)
+      ev === 'integrity' && cb(entry.integrity)
+      ev === 'size' && cb(entry.size)
+    })
+
+    const src = read.readStream(
+      cache,
+      entry.integrity,
+      { ...opts, size: typeof size !== 'number' ? entry.size : size }
+    )
+
+    if (memoize) {
+      const memoStream = new Collect.PassThrough()
+      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
+      stream.unshift(memoStream)
+    }
+    stream.unshift(src)
+    return stream
+  }).catch((err) => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.stream = getStream
+
+function getStreamDigest (cache, integrity, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get.byDigest(cache, integrity, opts)
+  if (memoized && memoize !== false) {
+    const stream = new Minipass()
+    stream.end(memoized)
+    return stream
+  } else {
+    const stream = read.readStream(cache, integrity, opts)
+    if (!memoize) {
+      return stream
+    }
+
+    const memoStream = new Collect.PassThrough()
+    memoStream.on('collect', data => memo.put.byDigest(
+      cache,
+      integrity,
+      data,
+      opts
+    ))
+    return new Pipeline(stream, memoStream)
+  }
+}
+
+module.exports.stream.byDigest = getStreamDigest
+
+function info (cache, key, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return Promise.resolve(memoized.entry)
+  } else {
+    return index.find(cache, key)
+  }
+}
+module.exports.info = info
+
+async function copy (cache, key, dest, opts = {}) {
+  const entry = await index.find(cache, key, opts)
+  if (!entry) {
+    throw new index.NotFoundError(cache, key)
+  }
+  await read.copy(cache, entry.integrity, dest, opts)
+  return {
+    metadata: entry.metadata,
+    size: entry.size,
+    integrity: entry.integrity,
+  }
+}
+
+module.exports.copy = copy
+
+async function copyByDigest (cache, key, dest, opts = {}) {
+  await read.copy(cache, key, dest, opts)
+  return key
+}
+
+module.exports.copy.byDigest = copyByDigest
+
+module.exports.hasContent = read.hasContent
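A usage sketch for the getters above (cache path, key, and data are
hypothetical):

    'use strict'
    const cacache = require('cacache')

    async function main () {
      const cache = '/tmp/my-cache'
      await cacache.put(cache, 'my-key', 'some data')

      // rejects with the NotFoundError (code ENOENT) from entry-index.js
      // when the key is missing
      const { data, integrity, metadata } = await cacache.get(cache, 'my-key')
      console.log(data.toString(), integrity, metadata)

      // streaming variant: metadata/integrity/size are emitted up front
      // and replayed to late listeners via the 'newListener' handler above
      cacache.get.stream(cache, 'my-key')
        .on('integrity', console.log)
        .on('error', console.error)
        .resume()
    }
    main().catch(console.error)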
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/index.js b/node_modules/tuf-js/node_modules/cacache/lib/index.js
new file mode 100644
index 0000000000000..c9b0da5f3a271
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/index.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const get = require('./get.js')
+const put = require('./put.js')
+const rm = require('./rm.js')
+const verify = require('./verify.js')
+const { clearMemoized } = require('./memoization.js')
+const tmp = require('./util/tmp.js')
+const index = require('./entry-index.js')
+
+module.exports.index = {}
+module.exports.index.compact = index.compact
+module.exports.index.insert = index.insert
+
+module.exports.ls = index.ls
+module.exports.ls.stream = index.lsStream
+
+module.exports.get = get
+module.exports.get.byDigest = get.byDigest
+module.exports.get.stream = get.stream
+module.exports.get.stream.byDigest = get.stream.byDigest
+module.exports.get.copy = get.copy
+module.exports.get.copy.byDigest = get.copy.byDigest
+module.exports.get.info = get.info
+module.exports.get.hasContent = get.hasContent
+
+module.exports.put = put
+module.exports.put.stream = put.stream
+
+module.exports.rm = rm.entry
+module.exports.rm.all = rm.all
+module.exports.rm.entry = module.exports.rm
+module.exports.rm.content = rm.content
+
+module.exports.clearMemoized = clearMemoized
+
+module.exports.tmp = {}
+module.exports.tmp.mkdir = tmp.mkdir
+module.exports.tmp.withTmp = tmp.withTmp
+
+module.exports.verify = verify
+module.exports.verify.lastRun = verify.lastRun
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/memoization.js b/node_modules/tuf-js/node_modules/cacache/lib/memoization.js
new file mode 100644
index 0000000000000..0ff604a479c9c
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/memoization.js
@@ -0,0 +1,72 @@
+'use strict'
+
+const LRU = require('lru-cache')
+
+const MEMOIZED = new LRU({
+  max: 500,
+  maxSize: 50 * 1024 * 1024, // 50MB
+  ttl: 3 * 60 * 1000, // 3 minutes
+  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
+})
+
+module.exports.clearMemoized = clearMemoized
+
+function clearMemoized () {
+  const old = {}
+  MEMOIZED.forEach((v, k) => {
+    old[k] = v
+  })
+  MEMOIZED.clear()
+  return old
+}
+
+module.exports.put = put
+
+function put (cache, entry, data, opts) {
+  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
+  putDigest(cache, entry.integrity, data, opts)
+}
+
+module.exports.put.byDigest = putDigest
+
+function putDigest (cache, integrity, data, opts) {
+  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
+}
+
+module.exports.get = get
+
+function get (cache, key, opts) {
+  return pickMem(opts).get(`key:${cache}:${key}`)
+}
+
+module.exports.get.byDigest = getDigest
+
+function getDigest (cache, integrity, opts) {
+  return pickMem(opts).get(`digest:${cache}:${integrity}`)
+}
+
+class ObjProxy {
+  constructor (obj) {
+    this.obj = obj
+  }
+
+  get (key) {
+    return this.obj[key]
+  }
+
+  set (key, val) {
+    this.obj[key] = val
+  }
+}
+
+function pickMem (opts) {
+  if (!opts || !opts.memoize) {
+    return MEMOIZED
+  } else if (opts.memoize.get && opts.memoize.set) {
+    return opts.memoize
+  } else if (typeof opts.memoize === 'object') {
+    return new ObjProxy(opts.memoize)
+  } else {
+    return MEMOIZED
+  }
+}
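pickMem() above means the `memoize` option can be left unset or truthy
(the shared LRU), any object with get/set methods (used directly), or a
plain object (wrapped in ObjProxy). A sketch with a caller-supplied Map
(cache path and key are hypothetical):

    'use strict'
    const cacache = require('cacache')

    async function main () {
      const cache = '/tmp/my-cache'
      const myMemo = new Map() // has get/set, so pickMem() uses it as-is

      await cacache.put(cache, 'k', 'v', { memoize: myMemo })
      // entries land in myMemo under "key:<cache>:<key>" and
      // "digest:<cache>:<integrity>"
      console.log([...myMemo.keys()])
    }
    main().catch(console.error)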
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/put.js b/node_modules/tuf-js/node_modules/cacache/lib/put.js
new file mode 100644
index 0000000000000..9fc932d5f6dec
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/put.js
@@ -0,0 +1,80 @@
+'use strict'
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const write = require('./content/write')
+const Flush = require('minipass-flush')
+const { PassThrough } = require('minipass-collect')
+const Pipeline = require('minipass-pipeline')
+
+const putOpts = (opts) => ({
+  algorithms: ['sha512'],
+  ...opts,
+})
+
+module.exports = putData
+
+async function putData (cache, key, data, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  const res = await write(cache, data, opts)
+  const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
+  if (memoize) {
+    memo.put(cache, entry, data, opts)
+  }
+
+  return res.integrity
+}
+
+module.exports.stream = putStream
+
+function putStream (cache, key, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  let integrity
+  let size
+  let error
+
+  let memoData
+  const pipeline = new Pipeline()
+  // first item in the pipeline is the memoizer, because we need
+  // that to end first and get the collected data.
+  if (memoize) {
+    const memoizer = new PassThrough().on('collect', data => {
+      memoData = data
+    })
+    pipeline.push(memoizer)
+  }
+
+  // contentStream is write-only, not a passthrough:
+  // no data comes out of it.
+  const contentStream = write.stream(cache, opts)
+    .on('integrity', (int) => {
+      integrity = int
+    })
+    .on('size', (s) => {
+      size = s
+    })
+    .on('error', (err) => {
+      error = err
+    })
+
+  pipeline.push(contentStream)
+
+  // last but not least, we write the index and emit hash and size,
+  // and memoize if we're doing that
+  pipeline.push(new Flush({
+    async flush () {
+      if (!error) {
+        const entry = await index.insert(cache, key, integrity, { ...opts, size })
+        if (memoize && memoData) {
+          memo.put(cache, entry, memoData, opts)
+        }
+        pipeline.emit('integrity', integrity)
+        pipeline.emit('size', size)
+      }
+    },
+  }))
+
+  return pipeline
+}
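A sketch of putStream() above feeding from a file; 'integrity' fires from
the final Flush stage, after the index entry has been written (paths and
key are hypothetical):

    'use strict'
    const fs = require('fs')
    const cacache = require('cacache')

    function cacheFile (cache, key, srcPath) {
      return new Promise((resolve, reject) => {
        const dest = cacache.put.stream(cache, key)
        dest.on('integrity', resolve)
        dest.on('error', reject)
        fs.createReadStream(srcPath).on('error', reject).pipe(dest)
      })
    }

    cacheFile('/tmp/my-cache', 'my-key', '/tmp/input.txt')
      .then(integrity => console.log('stored as', String(integrity)))
      .catch(console.error)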
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/rm.js b/node_modules/tuf-js/node_modules/cacache/lib/rm.js
new file mode 100644
index 0000000000000..a94760c7cf243
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/rm.js
@@ -0,0 +1,31 @@
+'use strict'
+
+const { rm } = require('fs/promises')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const memo = require('./memoization')
+const path = require('path')
+const rmContent = require('./content/rm')
+
+module.exports = entry
+module.exports.entry = entry
+
+function entry (cache, key, opts) {
+  memo.clearMemoized()
+  return index.delete(cache, key, opts)
+}
+
+module.exports.content = content
+
+function content (cache, integrity) {
+  memo.clearMemoized()
+  return rmContent(cache, integrity)
+}
+
+module.exports.all = all
+
+async function all (cache) {
+  memo.clearMemoized()
+  const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
+  return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
+}
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js b/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js
new file mode 100644
index 0000000000000..8500c1c16a429
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js
@@ -0,0 +1,7 @@
+'use strict'
+
+const { glob } = require('glob')
+const path = require('path')
+
+const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
+module.exports = (path, options) => glob(globify(path), options)
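globify() exists because glob v10 treats `\` as an escape character rather
than a path separator, so Windows paths must be normalized to `/` before
matching. The transform in isolation:

    'use strict'
    const path = require('path')

    // same transform as the unexported globify() above
    const globify = (p) => p.split(path.win32.sep).join(path.posix.sep)
    console.log(globify('C:\\cache\\content-v2\\**'))
    // -> C:/cache/content-v2/**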
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js
new file mode 100644
index 0000000000000..445599b503808
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js
@@ -0,0 +1,7 @@
+'use strict'
+
+module.exports = hashToSegments
+
+function hashToSegments (hash) {
+  return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
+}
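This fans content out across two directory levels so that no single
directory grows too large. For example:

    const hashToSegments = require('cacache/lib/util/hash-to-segments')

    console.log(hashToSegments('deadbeefcafe'))
    // -> [ 'de', 'ad', 'beefcafe' ]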
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js b/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js
new file mode 100644
index 0000000000000..0bf5302136ebe
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js
@@ -0,0 +1,26 @@
+'use strict'
+
+const { withTempDir } = require('@npmcli/fs')
+const fs = require('fs/promises')
+const path = require('path')
+
+module.exports.mkdir = mktmpdir
+
+async function mktmpdir (cache, opts = {}) {
+  const { tmpPrefix } = opts
+  const tmpDir = path.join(cache, 'tmp')
+  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
+  // do not use path.join(), it drops the trailing / if tmpPrefix is unset
+  const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
+  return fs.mkdtemp(target, { owner: 'inherit' })
+}
+
+module.exports.withTmp = withTmp
+
+function withTmp (cache, opts, cb) {
+  if (!cb) {
+    cb = opts
+    opts = {}
+  }
+  return withTempDir(path.join(cache, 'tmp'), cb, opts)
+}
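A sketch of withTmp() above, which hands the callback a fresh directory
under <cache>/tmp and cleans it up afterwards via @npmcli/fs withTempDir
(the cache path is hypothetical):

    'use strict'
    const tmp = require('cacache/lib/util/tmp')

    tmp.withTmp('/tmp/my-cache', async (dir) => {
      console.log('scratch space at', dir) // removed when callback settles
    }).catch(console.error)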
diff --git a/node_modules/tuf-js/node_modules/cacache/lib/verify.js b/node_modules/tuf-js/node_modules/cacache/lib/verify.js
new file mode 100644
index 0000000000000..62e85c946490f
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/lib/verify.js
@@ -0,0 +1,257 @@
+'use strict'
+
+const {
+  mkdir,
+  readFile,
+  rm,
+  stat,
+  truncate,
+  writeFile,
+} = require('fs/promises')
+const pMap = require('p-map')
+const contentPath = require('./content/path')
+const fsm = require('fs-minipass')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const path = require('path')
+const ssri = require('ssri')
+
+const hasOwnProperty = (obj, key) =>
+  Object.prototype.hasOwnProperty.call(obj, key)
+
+const verifyOpts = (opts) => ({
+  concurrency: 20,
+  log: { silly () {} },
+  ...opts,
+})
+
+module.exports = verify
+
+async function verify (cache, opts) {
+  opts = verifyOpts(opts)
+  opts.log.silly('verify', 'verifying cache at', cache)
+
+  const steps = [
+    markStartTime,
+    fixPerms,
+    garbageCollect,
+    rebuildIndex,
+    cleanTmp,
+    writeVerifile,
+    markEndTime,
+  ]
+
+  const stats = {}
+  for (const step of steps) {
+    const label = step.name
+    const start = new Date()
+    const s = await step(cache, opts)
+    if (s) {
+      Object.keys(s).forEach((k) => {
+        stats[k] = s[k]
+      })
+    }
+    const end = new Date()
+    if (!stats.runTime) {
+      stats.runTime = {}
+    }
+    stats.runTime[label] = end - start
+  }
+  stats.runTime.total = stats.endTime - stats.startTime
+  opts.log.silly(
+    'verify',
+    'verification finished for',
+    cache,
+    'in',
+    `${stats.runTime.total}ms`
+  )
+  return stats
+}
+
+async function markStartTime (cache, opts) {
+  return { startTime: new Date() }
+}
+
+async function markEndTime (cache, opts) {
+  return { endTime: new Date() }
+}
+
+async function fixPerms (cache, opts) {
+  opts.log.silly('verify', 'fixing cache permissions')
+  await mkdir(cache, { recursive: true })
+  return null
+}
+
+// Implements a naive mark-and-sweep tracing garbage collector.
+//
+// The algorithm is basically as follows:
+// 1. Read (and filter) all index entries ("pointers")
+// 2. Mark each integrity value as "live"
+// 3. Read entire filesystem tree in `content-vX/` dir
+// 4. If content is live, verify its checksum and delete it if it fails
+// 5. If content is not marked as live, rm it.
+//
+async function garbageCollect (cache, opts) {
+  opts.log.silly('verify', 'garbage collecting content')
+  const indexStream = index.lsStream(cache)
+  const liveContent = new Set()
+  indexStream.on('data', (entry) => {
+    if (opts.filter && !opts.filter(entry)) {
+      return
+    }
+
+    // integrity is stringified, re-parse it so we can get each hash
+    const integrity = ssri.parse(entry.integrity)
+    for (const algo in integrity) {
+      liveContent.add(integrity[algo].toString())
+    }
+  })
+  await new Promise((resolve, reject) => {
+    indexStream.on('end', resolve).on('error', reject)
+  })
+  const contentDir = contentPath.contentDir(cache)
+  const files = await glob(path.join(contentDir, '**'), {
+    follow: false,
+    nodir: true,
+    nosort: true,
+  })
+  const stats = {
+    verifiedContent: 0,
+    reclaimedCount: 0,
+    reclaimedSize: 0,
+    badContentCount: 0,
+    keptSize: 0,
+  }
+  await pMap(
+    files,
+    async (f) => {
+      const split = f.split(/[/\\]/)
+      const digest = split.slice(split.length - 3).join('')
+      const algo = split[split.length - 4]
+      const integrity = ssri.fromHex(digest, algo)
+      if (liveContent.has(integrity.toString())) {
+        const info = await verifyContent(f, integrity)
+        if (!info.valid) {
+          stats.reclaimedCount++
+          stats.badContentCount++
+          stats.reclaimedSize += info.size
+        } else {
+          stats.verifiedContent++
+          stats.keptSize += info.size
+        }
+      } else {
+        // No entries refer to this content. We can delete.
+        stats.reclaimedCount++
+        const s = await stat(f)
+        await rm(f, { recursive: true, force: true })
+        stats.reclaimedSize += s.size
+      }
+      return stats
+    },
+    { concurrency: opts.concurrency }
+  )
+  return stats
+}
+
+async function verifyContent (filepath, sri) {
+  const contentInfo = {}
+  try {
+    const { size } = await stat(filepath)
+    contentInfo.size = size
+    contentInfo.valid = true
+    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return { size: 0, valid: false }
+    }
+    if (err.code !== 'EINTEGRITY') {
+      throw err
+    }
+
+    await rm(filepath, { recursive: true, force: true })
+    contentInfo.valid = false
+  }
+  return contentInfo
+}
+
+async function rebuildIndex (cache, opts) {
+  opts.log.silly('verify', 'rebuilding index')
+  const entries = await index.ls(cache)
+  const stats = {
+    missingContent: 0,
+    rejectedEntries: 0,
+    totalEntries: 0,
+  }
+  const buckets = {}
+  for (const k in entries) {
+    /* istanbul ignore else */
+    if (hasOwnProperty(entries, k)) {
+      const hashed = index.hashKey(k)
+      const entry = entries[k]
+      const excluded = opts.filter && !opts.filter(entry)
+      excluded && stats.rejectedEntries++
+      if (buckets[hashed] && !excluded) {
+        buckets[hashed].push(entry)
+      } else if (buckets[hashed] && excluded) {
+        // skip
+      } else if (excluded) {
+        buckets[hashed] = []
+        buckets[hashed]._path = index.bucketPath(cache, k)
+      } else {
+        buckets[hashed] = [entry]
+        buckets[hashed]._path = index.bucketPath(cache, k)
+      }
+    }
+  }
+  await pMap(
+    Object.keys(buckets),
+    (key) => {
+      return rebuildBucket(cache, buckets[key], stats, opts)
+    },
+    { concurrency: opts.concurrency }
+  )
+  return stats
+}
+
+async function rebuildBucket (cache, bucket, stats, opts) {
+  await truncate(bucket._path)
+  // This needs to be serialized because cacache explicitly
+  // lets very racy bucket conflicts clobber each other.
+  for (const entry of bucket) {
+    const content = contentPath(cache, entry.integrity)
+    try {
+      await stat(content)
+      await index.insert(cache, entry.key, entry.integrity, {
+        metadata: entry.metadata,
+        size: entry.size,
+        time: entry.time,
+      })
+      stats.totalEntries++
+    } catch (err) {
+      if (err.code === 'ENOENT') {
+        stats.rejectedEntries++
+        stats.missingContent++
+      } else {
+        throw err
+      }
+    }
+  }
+}
+
+function cleanTmp (cache, opts) {
+  opts.log.silly('verify', 'cleaning tmp directory')
+  return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
+}
+
+async function writeVerifile (cache, opts) {
+  const verifile = path.join(cache, '_lastverified')
+  opts.log.silly('verify', 'writing verifile to ' + verifile)
+  return writeFile(verifile, `${Date.now()}`)
+}
+
+module.exports.lastRun = lastRun
+
+async function lastRun (cache) {
+  const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
+  return new Date(+data)
+}
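A sketch of running the verification pipeline above end to end (cache path
is hypothetical; on an empty cache the counters are simply zero):

    'use strict'
    const cacache = require('cacache')

    async function main () {
      const cache = '/tmp/my-cache'
      // runs fixPerms, garbageCollect, rebuildIndex, cleanTmp, and
      // writeVerifile in order, returning counters plus per-step timings
      const stats = await cacache.verify(cache)
      console.log(stats.verifiedContent, stats.reclaimedSize, stats.runTime)

      // reads back the _lastverified timestamp written by writeVerifile
      console.log(await cacache.verify.lastRun(cache))
    }
    main().catch(console.error)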
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/LICENSE b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/LICENSE
new file mode 100644
index 0000000000000..97f8e32ed82e4
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/index.js b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
new file mode 100644
index 0000000000000..b6cdae8eb514b
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/index.js
@@ -0,0 +1,1028 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Minipass = exports.isWritable = exports.isReadable = exports.isStream = void 0;
+const proc = typeof process === 'object' && process
+    ? process
+    : {
+        stdout: null,
+        stderr: null,
+    };
+const events_1 = require("events");
+const stream_1 = __importDefault(require("stream"));
+const string_decoder_1 = require("string_decoder");
+/**
+ * Return true if the argument is a Minipass stream, Node stream, or something
+ * else that Minipass can interact with.
+ */
+const isStream = (s) => !!s &&
+    typeof s === 'object' &&
+    (s instanceof Minipass ||
+        s instanceof stream_1.default ||
+        (0, exports.isReadable)(s) ||
+        (0, exports.isWritable)(s));
+exports.isStream = isStream;
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+const isReadable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof events_1.EventEmitter &&
+    typeof s.pipe === 'function' &&
+    // node core Writable streams have a pipe() method, but it throws
+    s.pipe !== stream_1.default.Writable.prototype.pipe;
+exports.isReadable = isReadable;
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+const isWritable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof events_1.EventEmitter &&
+    typeof s.write === 'function' &&
+    typeof s.end === 'function';
+exports.isWritable = isWritable;
+const EOF = Symbol('EOF');
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
+const EMITTED_END = Symbol('emittedEnd');
+const EMITTING_END = Symbol('emittingEnd');
+const EMITTED_ERROR = Symbol('emittedError');
+const CLOSED = Symbol('closed');
+const READ = Symbol('read');
+const FLUSH = Symbol('flush');
+const FLUSHCHUNK = Symbol('flushChunk');
+const ENCODING = Symbol('encoding');
+const DECODER = Symbol('decoder');
+const FLOWING = Symbol('flowing');
+const PAUSED = Symbol('paused');
+const RESUME = Symbol('resume');
+const BUFFER = Symbol('buffer');
+const PIPES = Symbol('pipes');
+const BUFFERLENGTH = Symbol('bufferLength');
+const BUFFERPUSH = Symbol('bufferPush');
+const BUFFERSHIFT = Symbol('bufferShift');
+const OBJECTMODE = Symbol('objectMode');
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed');
+// internal event when stream has an error
+const ERROR = Symbol('error');
+const EMITDATA = Symbol('emitData');
+const EMITEND = Symbol('emitEnd');
+const EMITEND2 = Symbol('emitEnd2');
+const ASYNC = Symbol('async');
+const ABORT = Symbol('abort');
+const ABORTED = Symbol('aborted');
+const SIGNAL = Symbol('signal');
+const DATALISTENERS = Symbol('dataListeners');
+const DISCARDED = Symbol('discarded');
+const defer = (fn) => Promise.resolve().then(fn);
+const nodefer = (fn) => fn();
+const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
+const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
+    (!!b &&
+        typeof b === 'object' &&
+        b.constructor &&
+        b.constructor.name === 'ArrayBuffer' &&
+        b.byteLength >= 0);
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
+/**
+ * Internal class representing a pipe to a destination stream.
+ *
+ * @internal
+ */
+class Pipe {
+    src;
+    dest;
+    opts;
+    ondrain;
+    constructor(src, dest, opts) {
+        this.src = src;
+        this.dest = dest;
+        this.opts = opts;
+        this.ondrain = () => src[RESUME]();
+        this.dest.on('drain', this.ondrain);
+    }
+    unpipe() {
+        this.dest.removeListener('drain', this.ondrain);
+    }
+    // only here for the prototype
+    /* c8 ignore start */
+    proxyErrors(_er) { }
+    /* c8 ignore stop */
+    end() {
+        this.unpipe();
+        if (this.opts.end)
+            this.dest.end();
+    }
+}
+/**
+ * Internal class representing a pipe to a destination stream where
+ * errors are proxied.
+ *
+ * @internal
+ */
+class PipeProxyErrors extends Pipe {
+    unpipe() {
+        this.src.removeListener('error', this.proxyErrors);
+        super.unpipe();
+    }
+    constructor(src, dest, opts) {
+        super(src, dest, opts);
+        this.proxyErrors = er => dest.emit('error', er);
+        src.on('error', this.proxyErrors);
+    }
+}
+const isObjectModeOptions = (o) => !!o.objectMode;
+const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
+/**
+ * Main export, the Minipass class
+ *
+ * `RType` is the type of data emitted, defaults to Buffer
+ *
+ * `WType` is the type of data to be written, if RType is buffer or string,
+ * then any {@link Minipass.ContiguousData} is allowed.
+ *
+ * `Events` is the set of event handler signatures that this object
+ * will emit, see {@link Minipass.Events}
+ */
+class Minipass extends events_1.EventEmitter {
+    [FLOWING] = false;
+    [PAUSED] = false;
+    [PIPES] = [];
+    [BUFFER] = [];
+    [OBJECTMODE];
+    [ENCODING];
+    [ASYNC];
+    [DECODER];
+    [EOF] = false;
+    [EMITTED_END] = false;
+    [EMITTING_END] = false;
+    [CLOSED] = false;
+    [EMITTED_ERROR] = null;
+    [BUFFERLENGTH] = 0;
+    [DESTROYED] = false;
+    [SIGNAL];
+    [ABORTED] = false;
+    [DATALISTENERS] = 0;
+    [DISCARDED] = false;
+    /**
+     * true if the stream can be written
+     */
+    writable = true;
+    /**
+     * true if the stream can be read
+     */
+    readable = true;
+    /**
+     * If `RType` is Buffer, then options do not need to be provided.
+     * Otherwise, an options object must be provided to specify either
+     * {@link Minipass.SharedOptions.objectMode} or
+     * {@link Minipass.SharedOptions.encoding}, as appropriate.
+     */
+    constructor(...args) {
+        const options = (args[0] ||
+            {});
+        super();
+        if (options.objectMode && typeof options.encoding === 'string') {
+            throw new TypeError('Encoding and objectMode may not be used together');
+        }
+        if (isObjectModeOptions(options)) {
+            this[OBJECTMODE] = true;
+            this[ENCODING] = null;
+        }
+        else if (isEncodingOptions(options)) {
+            this[ENCODING] = options.encoding;
+            this[OBJECTMODE] = false;
+        }
+        else {
+            this[OBJECTMODE] = false;
+            this[ENCODING] = null;
+        }
+        this[ASYNC] = !!options.async;
+        this[DECODER] = this[ENCODING]
+            ? new string_decoder_1.StringDecoder(this[ENCODING])
+            : null;
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposeBuffer === true) {
+            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+        }
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposePipes === true) {
+            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+        }
+        const { signal } = options;
+        if (signal) {
+            this[SIGNAL] = signal;
+            if (signal.aborted) {
+                this[ABORT]();
+            }
+            else {
+                signal.addEventListener('abort', () => this[ABORT]());
+            }
+        }
+    }
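+    // Usage sketch (illustrative): with no options the stream emits
+    // Buffers; pass `encoding` or `objectMode` at construction time to
+    // change what read() and 'data' produce.
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.write(Buffer.from('hello'))
+    //   mp.read() // => 'hello' (decoded to a string)
+    //
+    //   const om = new Minipass({ objectMode: true })
+    //   om.write({ any: 'value' })
+    //   om.read() // => { any: 'value' }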
+    /**
+     * The amount of data stored in the buffer waiting to be read.
+     *
+     * For Buffer streams, this will be the total byte length.
+     * For string encoding streams, this will be the string character length,
+     * according to JavaScript's `string.length` logic.
+     * For objectMode streams, this is a count of the items waiting to be
+     * emitted.
+     */
+    get bufferLength() {
+        return this[BUFFERLENGTH];
+    }
+    /**
+     * The `BufferEncoding` currently in use, or `null`
+     */
+    get encoding() {
+        return this[ENCODING];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set encoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * @deprecated - Encoding may only be set at instantiation time
+     */
+    setEncoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * True if this is an objectMode stream
+     */
+    get objectMode() {
+        return this[OBJECTMODE];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set objectMode(_om) {
+        throw new Error('objectMode must be set at instantiation time');
+    }
+    /**
+     * true if this is an async stream
+     */
+    get ['async']() {
+        return this[ASYNC];
+    }
+    /**
+     * Set to true to make this stream async.
+     *
+     * Once set, it cannot be unset, as this would potentially cause incorrect
+     * behavior. That is, a sync stream can be made async, but an async stream
+     * cannot be safely made sync.
+     */
+    set ['async'](a) {
+        this[ASYNC] = this[ASYNC] || !!a;
+    }
+    // drop everything and get out of the flow completely
+    [ABORT]() {
+        this[ABORTED] = true;
+        this.emit('abort', this[SIGNAL]?.reason);
+        this.destroy(this[SIGNAL]?.reason);
+    }
+    /**
+     * True if the stream has been aborted.
+     */
+    get aborted() {
+        return this[ABORTED];
+    }
+    /**
+     * No-op setter. Stream aborted status is set via the AbortSignal provided
+     * in the constructor options.
+     */
+    set aborted(_) { }
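+    // AbortSignal sketch (illustrative): an aborted signal destroys the
+    // stream and flips `aborted` to true; the abort reason (if any) is
+    // emitted with the 'abort' event and passed to destroy().
+    //
+    //   const ac = new AbortController()
+    //   const mp = new Minipass({ signal: ac.signal })
+    //   ac.abort()
+    //   mp.aborted    // => true
+    //   mp.write('x') // => false, silently dropped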
+    write(chunk, encoding, cb) {
+        if (this[ABORTED])
+            return false;
+        if (this[EOF])
+            throw new Error('write after end');
+        if (this[DESTROYED]) {
+            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+            return true;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (!encoding)
+            encoding = 'utf8';
+        const fn = this[ASYNC] ? defer : nodefer;
+        // convert array buffers and typed array views into buffers
+        // at some point in the future, we may want to do the opposite!
+        // leave strings and buffers as-is
+        // anything else is only allowed in objectMode, so throw
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance, fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
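+    // write() sketch (illustrative): the boolean return mirrors node core
+    // ("keep writing?"), and typed array views are converted to Buffers on
+    // non-objectMode streams.
+    //
+    //   const mp = new Minipass()
+    //   mp.write(new Uint8Array([1, 2, 3])) // => false, nothing is consuming yet
+    //   mp.on('data', () => {})             // starts the flow
+    //   mp.write('more', 'utf8', () => {})  // => true, callback runs synchronously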
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater than the amount of data in the internal buffer,
+     * then `null` is returned.
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ? this[BUFFER].join('')
+                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+            ];
+        }
+        const ret = this[READ](n || null, this[BUFFER][0]);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
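+    // read() sketch (illustrative): partial reads slice the front of the
+    // buffer; asking for more than is buffered yields null, never a short
+    // read.
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.write('abcdef')
+    //   mp.read(2)  // => 'ab'
+    //   mp.read(10) // => null (only 4 characters remain)
+    //   mp.read()   // => 'cdef'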
+    [READ](n, chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERSHIFT]();
+        else {
+            const c = chunk;
+            if (n === c.length || n === null)
+                this[BUFFERSHIFT]();
+            else if (typeof c === 'string') {
+                this[BUFFER][0] = c.slice(n);
+                chunk = c.slice(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+            else {
+                this[BUFFER][0] = c.subarray(n);
+                chunk = c.subarray(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+        }
+        this.emit('data', chunk);
+        if (!this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+        return chunk;
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (chunk !== undefined)
+            this.write(chunk, encoding);
+        if (cb)
+            this.once('end', cb);
+        this[EOF] = true;
+        this.writable = false;
+        // if we haven't written anything, then go ahead and emit,
+        // even if we're not reading.
+        // we'll re-emit if a new 'end' listener is added anyway.
+        // This makes MP more suitable to write-only use cases.
+        if (this[FLOWING] || !this[PAUSED])
+            this[MAYBE_EMIT_END]();
+        return this;
+    }
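+    // end() sketch (illustrative): a final chunk and a callback can both
+    // be supplied, which keeps write-only pipelines terse.
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.end('done', () => console.log('ended'))
+    //   mp.resume() // no consumers, so data is discarded and 'end' fires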
+    // don't let the internal resume be overwritten
+    [RESUME]() {
+        if (this[DESTROYED])
+            return;
+        if (!this[DATALISTENERS] && !this[PIPES].length) {
+            this[DISCARDED] = true;
+        }
+        this[PAUSED] = false;
+        this[FLOWING] = true;
+        this.emit('resume');
+        if (this[BUFFER].length)
+            this[FLUSH]();
+        else if (this[EOF])
+            this[MAYBE_EMIT_END]();
+        else
+            this.emit('drain');
+    }
+    /**
+     * Resume the stream if it is currently in a paused state
+     *
+     * If called when there are no pipe destinations or `data` event listeners,
+     * this will place the stream in a "discarded" state, where all data will
+     * be thrown away. The discarded state is removed if a pipe destination or
+     * data handler is added, if pause() is called, or if any synchronous or
+     * asynchronous iteration is started.
+     */
+    resume() {
+        return this[RESUME]();
+    }
+    /**
+     * Pause the stream
+     */
+    pause() {
+        this[FLOWING] = false;
+        this[PAUSED] = true;
+        this[DISCARDED] = false;
+    }
+    /**
+     * true if the stream has been forcibly destroyed
+     */
+    get destroyed() {
+        return this[DESTROYED];
+    }
+    /**
+     * true if the stream is currently in a flowing state, meaning that
+     * any writes will be immediately emitted.
+     */
+    get flowing() {
+        return this[FLOWING];
+    }
+    /**
+     * true if the stream is currently in a paused state
+     */
+    get paused() {
+        return this[PAUSED];
+    }
+    [BUFFERPUSH](chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] += 1;
+        else
+            this[BUFFERLENGTH] += chunk.length;
+        this[BUFFER].push(chunk);
+    }
+    [BUFFERSHIFT]() {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] -= 1;
+        else
+            this[BUFFERLENGTH] -= this[BUFFER][0].length;
+        return this[BUFFER].shift();
+    }
+    [FLUSH](noDrain = false) {
+        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
+            this[BUFFER].length);
+        if (!noDrain && !this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+    }
+    [FLUSHCHUNK](chunk) {
+        this.emit('data', chunk);
+        return this[FLOWING];
+    }
+    /**
+     * Pipe all data emitted by this stream into the destination provided.
+     *
+     * Triggers the flow of data.
+     */
+    pipe(dest, opts) {
+        if (this[DESTROYED])
+            return dest;
+        this[DISCARDED] = false;
+        const ended = this[EMITTED_END];
+        opts = opts || {};
+        if (dest === proc.stdout || dest === proc.stderr)
+            opts.end = false;
+        else
+            opts.end = opts.end !== false;
+        opts.proxyErrors = !!opts.proxyErrors;
+        // piping an ended stream ends immediately
+        if (ended) {
+            if (opts.end)
+                dest.end();
+        }
+        else {
+            // "as" here just ignores the WType, which pipes don't care about,
+            // since they're only consuming from us, and writing to the dest
+            this[PIPES].push(!opts.proxyErrors
+                ? new Pipe(this, dest, opts)
+                : new PipeProxyErrors(this, dest, opts));
+            if (this[ASYNC])
+                defer(() => this[RESUME]());
+            else
+                this[RESUME]();
+        }
+        return dest;
+    }
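+    // pipe() sketch (illustrative): piping starts the flow immediately;
+    // `{ end: false }` keeps the destination open, and process.stdout /
+    // process.stderr are never ended.
+    //
+    //   const src = new Minipass()
+    //   const dest = new Minipass()
+    //   src.pipe(dest, { proxyErrors: true }) // src errors re-emit on dest
+    //   src.write('x')                        // written straight through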
+    /**
+     * Fully unhook a piped destination stream.
+     *
+     * If the destination stream was the only consumer of this stream (ie,
+     * there are no other piped destinations or `'data'` event listeners)
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    unpipe(dest) {
+        const p = this[PIPES].find(p => p.dest === dest);
+        if (p) {
+            if (this[PIPES].length === 1) {
+                if (this[FLOWING] && this[DATALISTENERS] === 0) {
+                    this[FLOWING] = false;
+                }
+                this[PIPES] = [];
+            }
+            else
+                this[PIPES].splice(this[PIPES].indexOf(p), 1);
+            p.unpipe();
+        }
+    }
+    /**
+     * Alias for {@link Minipass#on}
+     */
+    addListener(ev, handler) {
+        return this.on(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.on`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * - Adding a 'data' event handler will trigger the flow of data
+     *
+     * - Adding a 'readable' event handler when there is data waiting to be read
+     *   will cause 'readable' to be emitted immediately.
+     *
+     * - Adding a handler for an 'endish' event ('end', 'finish', etc.) that
+     *   has already been emitted will cause the event to be emitted
+     *   immediately and all handlers removed.
+     *
+     * - Adding an 'error' event handler after an error has been emitted will
+     *   cause the event to be re-emitted immediately with the error previously
+     *   raised.
+     */
+    on(ev, handler) {
+        const ret = super.on(ev, handler);
+        if (ev === 'data') {
+            this[DISCARDED] = false;
+            this[DATALISTENERS]++;
+            if (!this[PIPES].length && !this[FLOWING]) {
+                this[RESUME]();
+            }
+        }
+        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
+            super.emit('readable');
+        }
+        else if (isEndish(ev) && this[EMITTED_END]) {
+            super.emit(ev);
+            this.removeAllListeners(ev);
+        }
+        else if (ev === 'error' && this[EMITTED_ERROR]) {
+            const h = handler;
+            if (this[ASYNC])
+                defer(() => h.call(this, this[EMITTED_ERROR]));
+            else
+                h.call(this, this[EMITTED_ERROR]);
+        }
+        return ret;
+    }
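+    // on() sketch (illustrative): late listeners still observe terminal
+    // events, avoiding attach-after-the-fact races.
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.end('hi')
+    //   mp.resume() // 'end' has now been emitted
+    //   mp.on('end', () => console.log('still fires immediately'))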
+    /**
+     * Alias for {@link Minipass#off}
+     */
+    removeListener(ev, handler) {
+        return this.off(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.off`
+     *
+     * If a 'data' event handler is removed, and it was the last consumer
+     * (ie, there are no pipe destinations or other 'data' event listeners),
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    off(ev, handler) {
+        const ret = super.off(ev, handler);
+        // if we previously had listeners, and now we don't, and we don't
+        // have any pipes, then stop the flow, unless it's been explicitly
+        // put in a discarded flowing state via stream.resume().
+        if (ev === 'data') {
+            this[DATALISTENERS] = this.listeners('data').length;
+            if (this[DATALISTENERS] === 0 &&
+                !this[DISCARDED] &&
+                !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * Mostly identical to `EventEmitter.removeAllListeners`
+     *
+     * If all 'data' event handlers are removed, and they were the last consumer
+     * (ie, there are no pipe destinations), then the flow of data will stop
+     * until there is another consumer or {@link Minipass#resume} is explicitly
+     * called.
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ? super.emit('error', data)
+                : false;
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'resume') {
+            const ret = super.emit('resume');
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'finish' || ev === 'prefinish') {
+            const ret = super.emit(ev);
+            this.removeAllListeners(ev);
+            return ret;
+        }
+        // Some other unknown event
+        const ret = super.emit(ev, ...args);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITDATA](data) {
+        for (const p of this[PIPES]) {
+            if (p.dest.write(data) === false)
+                this.pause();
+        }
+        const ret = this[DISCARDED] ? false : super.emit('data', data);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITEND]() {
+        if (this[EMITTED_END])
+            return false;
+        this[EMITTED_END] = true;
+        this.readable = false;
+        return this[ASYNC]
+            ? (defer(() => this[EMITEND2]()), true)
+            : this[EMITEND2]();
+    }
+    [EMITEND2]() {
+        if (this[DECODER]) {
+            const data = this[DECODER].end();
+            if (data) {
+                for (const p of this[PIPES]) {
+                    p.dest.write(data);
+                }
+                if (!this[DISCARDED])
+                    super.emit('data', data);
+            }
+        }
+        for (const p of this[PIPES]) {
+            p.end();
+        }
+        const ret = super.emit('end');
+        this.removeAllListeners('end');
+        return ret;
+    }
+    /**
+     * Return a Promise that resolves to an array of all emitted data once
+     * the stream ends.
+     */
+    async collect() {
+        const buf = Object.assign([], {
+            dataLength: 0,
+        });
+        if (!this[OBJECTMODE])
+            buf.dataLength = 0;
+        // set the promise first, in case an error is raised
+        // by triggering the flow here.
+        const p = this.promise();
+        this.on('data', c => {
+            buf.push(c);
+            if (!this[OBJECTMODE])
+                buf.dataLength += c.length;
+        });
+        await p;
+        return buf;
+    }
+    /**
+     * Return a Promise that resolves to the concatenation of all emitted data
+     * once the stream ends.
+     *
+     * Not allowed on objectMode streams.
+     */
+    async concat() {
+        if (this[OBJECTMODE]) {
+            throw new Error('cannot concat in objectMode');
+        }
+        const buf = await this.collect();
+        return (this[ENCODING]
+            ? buf.join('')
+            : Buffer.concat(buf, buf.dataLength));
+    }
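+    // collect()/concat() sketch (illustrative): convenient for buffering a
+    // whole stream, e.g. in tests or for small payloads (inside an async
+    // function).
+    //
+    //   const mp = new Minipass({ encoding: 'utf8' })
+    //   mp.end('hello, world')
+    //   await mp.concat() // => 'hello, world'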
+    /**
+     * Return a void Promise that resolves once the stream ends.
+     */
+    async promise() {
+        return new Promise((resolve, reject) => {
+            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
+            this.on('error', er => reject(er));
+            this.on('end', () => resolve());
+        });
+    }
+    /**
+     * Asynchronous `for await of` iteration.
+     *
+     * This will continue emitting all chunks until the stream terminates.
+     */
+    [Symbol.asyncIterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = async () => {
+            this.pause();
+            stopped = true;
+            return { value: undefined, done: true };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const res = this.read();
+            if (res !== null)
+                return Promise.resolve({ done: false, value: res });
+            if (this[EOF])
+                return stop();
+            let resolve;
+            let reject;
+            const onerr = (er) => {
+                this.off('data', ondata);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                reject(er);
+            };
+            const ondata = (value) => {
+                this.off('error', onerr);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                this.pause();
+                resolve({ value, done: !!this[EOF] });
+            };
+            const onend = () => {
+                this.off('error', onerr);
+                this.off('data', ondata);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                resolve({ done: true, value: undefined });
+            };
+            const ondestroy = () => onerr(new Error('stream destroyed'));
+            return new Promise((res, rej) => {
+                reject = rej;
+                resolve = res;
+                this.once(DESTROYED, ondestroy);
+                this.once('error', onerr);
+                this.once('end', onend);
+                this.once('data', ondata);
+            });
+        };
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+        };
+    }
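+    // Async iteration sketch (illustrative): pulls chunks as they arrive,
+    // pausing between reads (inside an async function).
+    //
+    //   const mp = new Minipass({ objectMode: true })
+    //   setImmediate(() => { mp.write(1); mp.write(2); mp.end() })
+    //   for await (const item of mp) console.log(item) // 1, then 2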
+    /**
+     * Synchronous `for of` iteration.
+     *
+     * The iteration will terminate when the internal buffer runs out, even
+     * if the stream has not yet terminated.
+     */
+    [Symbol.iterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = () => {
+            this.pause();
+            this.off(ERROR, stop);
+            this.off(DESTROYED, stop);
+            this.off('end', stop);
+            stopped = true;
+            return { done: true, value: undefined };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const value = this.read();
+            return value === null ? stop() : { done: false, value };
+        };
+        this.once('end', stop);
+        this.once(ERROR, stop);
+        this.once(DESTROYED, stop);
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.iterator]() {
+                return this;
+            },
+        };
+    }
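+    // Sync iteration sketch (illustrative): drains only what is already
+    // buffered, and never waits for future writes.
+    //
+    //   const mp = new Minipass({ objectMode: true })
+    //   mp.write('a')
+    //   mp.write('b')
+    //   const items = [...mp] // => ['a', 'b']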
+    /**
+     * Destroy a stream, preventing it from being used for any further purpose.
+     *
+     * If the stream has a `close()` method, then it will be called on
+     * destruction.
+     *
+     * After destruction, any attempt to write data, read data, or emit most
+     * events will be ignored.
+     *
+     * If an error argument is provided, then it will be emitted in an
+     * 'error' event.
+     */
+    destroy(er) {
+        if (this[DESTROYED]) {
+            if (er)
+                this.emit('error', er);
+            else
+                this.emit(DESTROYED);
+            return this;
+        }
+        this[DESTROYED] = true;
+        this[DISCARDED] = true;
+        // throw away all buffered data, it's never coming out
+        this[BUFFER].length = 0;
+        this[BUFFERLENGTH] = 0;
+        const wc = this;
+        if (typeof wc.close === 'function' && !this[CLOSED])
+            wc.close();
+        if (er)
+            this.emit('error', er);
+        // if no error to emit, still reject pending promises
+        else
+            this.emit(DESTROYED);
+        return this;
+    }
+    /**
+     * Alias for {@link isStream}
+     *
+     * Former export location, maintained for backwards compatibility.
+     *
+     * @deprecated
+     */
+    static get isStream() {
+        return exports.isStream;
+    }
+}
+exports.Minipass = Minipass;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/package.json b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/cjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/index.js b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
new file mode 100644
index 0000000000000..b65fafbae43a4
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/index.js
@@ -0,0 +1,1018 @@
+const proc = typeof process === 'object' && process
+    ? process
+    : {
+        stdout: null,
+        stderr: null,
+    };
+import { EventEmitter } from 'events';
+import Stream from 'stream';
+import { StringDecoder } from 'string_decoder';
+/**
+ * Return true if the argument is a Minipass stream, Node stream, or something
+ * else that Minipass can interact with.
+ */
+export const isStream = (s) => !!s &&
+    typeof s === 'object' &&
+    (s instanceof Minipass ||
+        s instanceof Stream ||
+        isReadable(s) ||
+        isWritable(s));
+/**
+ * Return true if the argument is a valid {@link Minipass.Readable}
+ */
+export const isReadable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof EventEmitter &&
+    typeof s.pipe === 'function' &&
+    // node core Writable streams have a pipe() method, but it throws
+    s.pipe !== Stream.Writable.prototype.pipe;
+/**
+ * Return true if the argument is a valid {@link Minipass.Writable}
+ */
+export const isWritable = (s) => !!s &&
+    typeof s === 'object' &&
+    s instanceof EventEmitter &&
+    typeof s.write === 'function' &&
+    typeof s.end === 'function';
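+// Duck-typing sketch (illustrative): these guards accept anything
+// sufficiently stream-shaped, not just Minipass instances.
+//
+//   isStream(new Minipass())      // => true
+//   isStream(process.stdout)      // => true (node core stream)
+//   isStream({ write: () => {} }) // => false (not an EventEmitter)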
+const EOF = Symbol('EOF');
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd');
+const EMITTED_END = Symbol('emittedEnd');
+const EMITTING_END = Symbol('emittingEnd');
+const EMITTED_ERROR = Symbol('emittedError');
+const CLOSED = Symbol('closed');
+const READ = Symbol('read');
+const FLUSH = Symbol('flush');
+const FLUSHCHUNK = Symbol('flushChunk');
+const ENCODING = Symbol('encoding');
+const DECODER = Symbol('decoder');
+const FLOWING = Symbol('flowing');
+const PAUSED = Symbol('paused');
+const RESUME = Symbol('resume');
+const BUFFER = Symbol('buffer');
+const PIPES = Symbol('pipes');
+const BUFFERLENGTH = Symbol('bufferLength');
+const BUFFERPUSH = Symbol('bufferPush');
+const BUFFERSHIFT = Symbol('bufferShift');
+const OBJECTMODE = Symbol('objectMode');
+// internal event when stream is destroyed
+const DESTROYED = Symbol('destroyed');
+// internal event when stream has an error
+const ERROR = Symbol('error');
+const EMITDATA = Symbol('emitData');
+const EMITEND = Symbol('emitEnd');
+const EMITEND2 = Symbol('emitEnd2');
+const ASYNC = Symbol('async');
+const ABORT = Symbol('abort');
+const ABORTED = Symbol('aborted');
+const SIGNAL = Symbol('signal');
+const DATALISTENERS = Symbol('dataListeners');
+const DISCARDED = Symbol('discarded');
+const defer = (fn) => Promise.resolve().then(fn);
+const nodefer = (fn) => fn();
+const isEndish = (ev) => ev === 'end' || ev === 'finish' || ev === 'prefinish';
+const isArrayBufferLike = (b) => b instanceof ArrayBuffer ||
+    (!!b &&
+        typeof b === 'object' &&
+        b.constructor &&
+        b.constructor.name === 'ArrayBuffer' &&
+        b.byteLength >= 0);
+const isArrayBufferView = (b) => !Buffer.isBuffer(b) && ArrayBuffer.isView(b);
+/**
+ * Internal class representing a pipe to a destination stream.
+ *
+ * @internal
+ */
+class Pipe {
+    src;
+    dest;
+    opts;
+    ondrain;
+    constructor(src, dest, opts) {
+        this.src = src;
+        this.dest = dest;
+        this.opts = opts;
+        this.ondrain = () => src[RESUME]();
+        this.dest.on('drain', this.ondrain);
+    }
+    unpipe() {
+        this.dest.removeListener('drain', this.ondrain);
+    }
+    // only here for the prototype
+    /* c8 ignore start */
+    proxyErrors(_er) { }
+    /* c8 ignore stop */
+    end() {
+        this.unpipe();
+        if (this.opts.end)
+            this.dest.end();
+    }
+}
+/**
+ * Internal class representing a pipe to a destination stream where
+ * errors are proxied.
+ *
+ * @internal
+ */
+class PipeProxyErrors extends Pipe {
+    unpipe() {
+        this.src.removeListener('error', this.proxyErrors);
+        super.unpipe();
+    }
+    constructor(src, dest, opts) {
+        super(src, dest, opts);
+        this.proxyErrors = er => dest.emit('error', er);
+        src.on('error', this.proxyErrors);
+    }
+}
+const isObjectModeOptions = (o) => !!o.objectMode;
+const isEncodingOptions = (o) => !o.objectMode && !!o.encoding && o.encoding !== 'buffer';
+/**
+ * Main export, the Minipass class
+ *
+ * `RType` is the type of data emitted; defaults to Buffer.
+ *
+ * `WType` is the type of data to be written. If RType is Buffer or string,
+ * then any {@link Minipass.ContiguousData} is allowed.
+ *
+ * `Events` is the set of event handler signatures that this object
+ * will emit, see {@link Minipass.Events}
+ */
+export class Minipass extends EventEmitter {
+    [FLOWING] = false;
+    [PAUSED] = false;
+    [PIPES] = [];
+    [BUFFER] = [];
+    [OBJECTMODE];
+    [ENCODING];
+    [ASYNC];
+    [DECODER];
+    [EOF] = false;
+    [EMITTED_END] = false;
+    [EMITTING_END] = false;
+    [CLOSED] = false;
+    [EMITTED_ERROR] = null;
+    [BUFFERLENGTH] = 0;
+    [DESTROYED] = false;
+    [SIGNAL];
+    [ABORTED] = false;
+    [DATALISTENERS] = 0;
+    [DISCARDED] = false;
+    /**
+     * true if the stream can be written
+     */
+    writable = true;
+    /**
+     * true if the stream can be read
+     */
+    readable = true;
+    /**
+     * If `RType` is Buffer, then options do not need to be provided.
+     * Otherwise, an options object must be provided to specify either
+     * {@link Minipass.SharedOptions.objectMode} or
+     * {@link Minipass.SharedOptions.encoding}, as appropriate.
+     */
+    constructor(...args) {
+        const options = (args[0] ||
+            {});
+        super();
+        if (options.objectMode && typeof options.encoding === 'string') {
+            throw new TypeError('Encoding and objectMode may not be used together');
+        }
+        if (isObjectModeOptions(options)) {
+            this[OBJECTMODE] = true;
+            this[ENCODING] = null;
+        }
+        else if (isEncodingOptions(options)) {
+            this[ENCODING] = options.encoding;
+            this[OBJECTMODE] = false;
+        }
+        else {
+            this[OBJECTMODE] = false;
+            this[ENCODING] = null;
+        }
+        this[ASYNC] = !!options.async;
+        this[DECODER] = this[ENCODING]
+            ? new StringDecoder(this[ENCODING])
+            : null;
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposeBuffer === true) {
+            Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] });
+        }
+        //@ts-ignore - private option for debugging and testing
+        if (options && options.debugExposePipes === true) {
+            Object.defineProperty(this, 'pipes', { get: () => this[PIPES] });
+        }
+        const { signal } = options;
+        if (signal) {
+            this[SIGNAL] = signal;
+            if (signal.aborted) {
+                this[ABORT]();
+            }
+            else {
+                signal.addEventListener('abort', () => this[ABORT]());
+            }
+        }
+    }
+    /**
+     * The amount of data stored in the buffer waiting to be read.
+     *
+     * For Buffer streams, this will be the total byte length.
+     * For string encoding streams, this will be the string character length,
+     * according to JavaScript's `string.length` logic.
+     * For objectMode streams, this is a count of the items waiting to be
+     * emitted.
+     */
+    get bufferLength() {
+        return this[BUFFERLENGTH];
+    }
+    /**
+     * The `BufferEncoding` currently in use, or `null`
+     */
+    get encoding() {
+        return this[ENCODING];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set encoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * @deprecated - Encoding may only be set at instantiation time
+     */
+    setEncoding(_enc) {
+        throw new Error('Encoding must be set at instantiation time');
+    }
+    /**
+     * True if this is an objectMode stream
+     */
+    get objectMode() {
+        return this[OBJECTMODE];
+    }
+    /**
+     * @deprecated - This is a read-only property
+     */
+    set objectMode(_om) {
+        throw new Error('objectMode must be set at instantiation time');
+    }
+    /**
+     * true if this is an async stream
+     */
+    get ['async']() {
+        return this[ASYNC];
+    }
+    /**
+     * Set to true to make this stream async.
+     *
+     * Once set, it cannot be unset, as this would potentially cause incorrect
+     * behavior. That is, a sync stream can be made async, but an async stream
+     * cannot be safely made sync.
+     */
+    set ['async'](a) {
+        this[ASYNC] = this[ASYNC] || !!a;
+    }
+    // drop everything and get out of the flow completely
+    [ABORT]() {
+        this[ABORTED] = true;
+        this.emit('abort', this[SIGNAL]?.reason);
+        this.destroy(this[SIGNAL]?.reason);
+    }
+    /**
+     * True if the stream has been aborted.
+     */
+    get aborted() {
+        return this[ABORTED];
+    }
+    /**
+     * No-op setter. Stream aborted status is set via the AbortSignal provided
+     * in the constructor options.
+     */
+    set aborted(_) { }
+    write(chunk, encoding, cb) {
+        if (this[ABORTED])
+            return false;
+        if (this[EOF])
+            throw new Error('write after end');
+        if (this[DESTROYED]) {
+            this.emit('error', Object.assign(new Error('Cannot call write after a stream was destroyed'), { code: 'ERR_STREAM_DESTROYED' }));
+            return true;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (!encoding)
+            encoding = 'utf8';
+        const fn = this[ASYNC] ? defer : nodefer;
+        // convert array buffers and typed array views into buffers
+        // at some point in the future, we may want to do the opposite!
+        // leave strings and buffers as-is
+        // anything else is only allowed in objectMode, so throw
+        if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+            if (isArrayBufferView(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength);
+            }
+            else if (isArrayBufferLike(chunk)) {
+                //@ts-ignore - sinful unsafe type changing
+                chunk = Buffer.from(chunk);
+            }
+            else if (typeof chunk !== 'string') {
+                throw new Error('Non-contiguous data written to non-objectMode stream');
+            }
+        }
+        // handle object mode up front, since it's simpler
+        // this yields better performance, fewer checks later.
+        if (this[OBJECTMODE]) {
+            // maybe impossible?
+            /* c8 ignore start */
+            if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+                this[FLUSH](true);
+            /* c8 ignore stop */
+            if (this[FLOWING])
+                this.emit('data', chunk);
+            else
+                this[BUFFERPUSH](chunk);
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // at this point the chunk is a buffer or string
+        // don't buffer it up or send it to the decoder
+        if (!chunk.length) {
+            if (this[BUFFERLENGTH] !== 0)
+                this.emit('readable');
+            if (cb)
+                fn(cb);
+            return this[FLOWING];
+        }
+        // fast-path writing strings of same encoding to a stream with
+        // an empty buffer, skipping the buffer/decoder dance
+        if (typeof chunk === 'string' &&
+            // unless it is a string already ready for us to use
+            !(encoding === this[ENCODING] && !this[DECODER]?.lastNeed)) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (Buffer.isBuffer(chunk) && this[ENCODING]) {
+            //@ts-ignore - sinful unsafe type change
+            chunk = this[DECODER].write(chunk);
+        }
+        // Note: flushing CAN potentially switch us into not-flowing mode
+        if (this[FLOWING] && this[BUFFERLENGTH] !== 0)
+            this[FLUSH](true);
+        if (this[FLOWING])
+            this.emit('data', chunk);
+        else
+            this[BUFFERPUSH](chunk);
+        if (this[BUFFERLENGTH] !== 0)
+            this.emit('readable');
+        if (cb)
+            fn(cb);
+        return this[FLOWING];
+    }
+    /**
+     * Low-level explicit read method.
+     *
+     * In objectMode, the argument is ignored, and one item is returned if
+     * available.
+     *
+     * `n` is the number of bytes (or in the case of encoding streams,
+     * characters) to consume. If `n` is not provided, then the entire buffer
+     * is returned, or `null` is returned if no data is available.
+     *
+     * If `n` is greater than the amount of data in the internal buffer,
+     * then `null` is returned.
+     */
+    read(n) {
+        if (this[DESTROYED])
+            return null;
+        this[DISCARDED] = false;
+        if (this[BUFFERLENGTH] === 0 ||
+            n === 0 ||
+            (n && n > this[BUFFERLENGTH])) {
+            this[MAYBE_EMIT_END]();
+            return null;
+        }
+        if (this[OBJECTMODE])
+            n = null;
+        if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
+            // not object mode, so if we have an encoding, then RType is string
+            // otherwise, must be Buffer
+            this[BUFFER] = [
+                (this[ENCODING]
+                    ? this[BUFFER].join('')
+                    : Buffer.concat(this[BUFFER], this[BUFFERLENGTH])),
+            ];
+        }
+        const ret = this[READ](n || null, this[BUFFER][0]);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [READ](n, chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERSHIFT]();
+        else {
+            const c = chunk;
+            if (n === c.length || n === null)
+                this[BUFFERSHIFT]();
+            else if (typeof c === 'string') {
+                this[BUFFER][0] = c.slice(n);
+                chunk = c.slice(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+            else {
+                this[BUFFER][0] = c.subarray(n);
+                chunk = c.subarray(0, n);
+                this[BUFFERLENGTH] -= n;
+            }
+        }
+        this.emit('data', chunk);
+        if (!this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+        return chunk;
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = 'utf8';
+        }
+        if (chunk !== undefined)
+            this.write(chunk, encoding);
+        if (cb)
+            this.once('end', cb);
+        this[EOF] = true;
+        this.writable = false;
+        // if we haven't written anything, then go ahead and emit,
+        // even if we're not reading.
+        // we'll re-emit if a new 'end' listener is added anyway.
+        // This makes MP more suitable to write-only use cases.
+        if (this[FLOWING] || !this[PAUSED])
+            this[MAYBE_EMIT_END]();
+        return this;
+    }
+    // don't let the internal resume be overwritten
+    [RESUME]() {
+        if (this[DESTROYED])
+            return;
+        if (!this[DATALISTENERS] && !this[PIPES].length) {
+            this[DISCARDED] = true;
+        }
+        this[PAUSED] = false;
+        this[FLOWING] = true;
+        this.emit('resume');
+        if (this[BUFFER].length)
+            this[FLUSH]();
+        else if (this[EOF])
+            this[MAYBE_EMIT_END]();
+        else
+            this.emit('drain');
+    }
+    /**
+     * Resume the stream if it is currently in a paused state
+     *
+     * If called when there are no pipe destinations or `data` event listeners,
+     * this will place the stream in a "discarded" state, where all data will
+     * be thrown away. The discarded state is removed if a pipe destination or
+     * data handler is added, if pause() is called, or if any synchronous or
+     * asynchronous iteration is started.
+     */
+    resume() {
+        return this[RESUME]();
+    }
+    /**
+     * Pause the stream
+     */
+    pause() {
+        this[FLOWING] = false;
+        this[PAUSED] = true;
+        this[DISCARDED] = false;
+    }
+    /**
+     * true if the stream has been forcibly destroyed
+     */
+    get destroyed() {
+        return this[DESTROYED];
+    }
+    /**
+     * true if the stream is currently in a flowing state, meaning that
+     * any writes will be immediately emitted.
+     */
+    get flowing() {
+        return this[FLOWING];
+    }
+    /**
+     * true if the stream is currently in a paused state
+     */
+    get paused() {
+        return this[PAUSED];
+    }
+    [BUFFERPUSH](chunk) {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] += 1;
+        else
+            this[BUFFERLENGTH] += chunk.length;
+        this[BUFFER].push(chunk);
+    }
+    [BUFFERSHIFT]() {
+        if (this[OBJECTMODE])
+            this[BUFFERLENGTH] -= 1;
+        else
+            this[BUFFERLENGTH] -= this[BUFFER][0].length;
+        return this[BUFFER].shift();
+    }
+    [FLUSH](noDrain = false) {
+        do { } while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) &&
+            this[BUFFER].length);
+        if (!noDrain && !this[BUFFER].length && !this[EOF])
+            this.emit('drain');
+    }
+    [FLUSHCHUNK](chunk) {
+        this.emit('data', chunk);
+        return this[FLOWING];
+    }
+    /**
+     * Pipe all data emitted by this stream into the destination provided.
+     *
+     * Triggers the flow of data.
+     */
+    pipe(dest, opts) {
+        if (this[DESTROYED])
+            return dest;
+        this[DISCARDED] = false;
+        const ended = this[EMITTED_END];
+        opts = opts || {};
+        if (dest === proc.stdout || dest === proc.stderr)
+            opts.end = false;
+        else
+            opts.end = opts.end !== false;
+        opts.proxyErrors = !!opts.proxyErrors;
+        // piping an ended stream ends immediately
+        if (ended) {
+            if (opts.end)
+                dest.end();
+        }
+        else {
+            // "as" here just ignores the WType, which pipes don't care about,
+            // since they're only consuming from us, and writing to the dest
+            this[PIPES].push(!opts.proxyErrors
+                ? new Pipe(this, dest, opts)
+                : new PipeProxyErrors(this, dest, opts));
+            if (this[ASYNC])
+                defer(() => this[RESUME]());
+            else
+                this[RESUME]();
+        }
+        return dest;
+    }
+    /**
+     * Fully unhook a piped destination stream.
+     *
+     * If the destination stream was the only consumer of this stream (ie,
+     * there are no other piped destinations or `'data'` event listeners)
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    unpipe(dest) {
+        const p = this[PIPES].find(p => p.dest === dest);
+        if (p) {
+            if (this[PIPES].length === 1) {
+                if (this[FLOWING] && this[DATALISTENERS] === 0) {
+                    this[FLOWING] = false;
+                }
+                this[PIPES] = [];
+            }
+            else
+                this[PIPES].splice(this[PIPES].indexOf(p), 1);
+            p.unpipe();
+        }
+    }
+    /**
+     * Alias for {@link Minipass#on}
+     */
+    addListener(ev, handler) {
+        return this.on(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.on`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * - Adding a 'data' event handler will trigger the flow of data
+     *
+     * - Adding a 'readable' event handler when there is data waiting to be read
+     *   will cause 'readable' to be emitted immediately.
+     *
+     * - Adding a handler for an 'endish' event ('end', 'finish', etc.) that
+     *   has already been emitted will cause the event to be emitted
+     *   immediately and all handlers removed.
+     *
+     * - Adding an 'error' event handler after an error has been emitted will
+     *   cause the event to be re-emitted immediately with the error previously
+     *   raised.
+     */
+    on(ev, handler) {
+        const ret = super.on(ev, handler);
+        if (ev === 'data') {
+            this[DISCARDED] = false;
+            this[DATALISTENERS]++;
+            if (!this[PIPES].length && !this[FLOWING]) {
+                this[RESUME]();
+            }
+        }
+        else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) {
+            super.emit('readable');
+        }
+        else if (isEndish(ev) && this[EMITTED_END]) {
+            super.emit(ev);
+            this.removeAllListeners(ev);
+        }
+        else if (ev === 'error' && this[EMITTED_ERROR]) {
+            const h = handler;
+            if (this[ASYNC])
+                defer(() => h.call(this, this[EMITTED_ERROR]));
+            else
+                h.call(this, this[EMITTED_ERROR]);
+        }
+        return ret;
+    }
+    /**
+     * Alias for {@link Minipass#off}
+     */
+    removeListener(ev, handler) {
+        return this.off(ev, handler);
+    }
+    /**
+     * Mostly identical to `EventEmitter.off`
+     *
+     * If a 'data' event handler is removed, and it was the last consumer
+     * (ie, there are no pipe destinations or other 'data' event listeners),
+     * then the flow of data will stop until there is another consumer or
+     * {@link Minipass#resume} is explicitly called.
+     */
+    off(ev, handler) {
+        const ret = super.off(ev, handler);
+        // if we previously had listeners, and now we don't, and we don't
+        // have any pipes, then stop the flow, unless it's been explicitly
+        // put in a discarded flowing state via stream.resume().
+        if (ev === 'data') {
+            this[DATALISTENERS] = this.listeners('data').length;
+            if (this[DATALISTENERS] === 0 &&
+                !this[DISCARDED] &&
+                !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * Mostly identical to `EventEmitter.removeAllListeners`
+     *
+     * If all 'data' event handlers are removed, and they were the last consumer
+     * (ie, there are no pipe destinations), then the flow of data will stop
+     * until there is another consumer or {@link Minipass#resume} is explicitly
+     * called.
+     */
+    removeAllListeners(ev) {
+        const ret = super.removeAllListeners(ev);
+        if (ev === 'data' || ev === undefined) {
+            this[DATALISTENERS] = 0;
+            if (!this[DISCARDED] && !this[PIPES].length) {
+                this[FLOWING] = false;
+            }
+        }
+        return ret;
+    }
+    /**
+     * true if the 'end' event has been emitted
+     */
+    get emittedEnd() {
+        return this[EMITTED_END];
+    }
+    [MAYBE_EMIT_END]() {
+        if (!this[EMITTING_END] &&
+            !this[EMITTED_END] &&
+            !this[DESTROYED] &&
+            this[BUFFER].length === 0 &&
+            this[EOF]) {
+            this[EMITTING_END] = true;
+            this.emit('end');
+            this.emit('prefinish');
+            this.emit('finish');
+            if (this[CLOSED])
+                this.emit('close');
+            this[EMITTING_END] = false;
+        }
+    }
+    /**
+     * Mostly identical to `EventEmitter.emit`, with the following
+     * behavior differences to prevent data loss and unnecessary hangs:
+     *
+     * If the stream has been destroyed, and the event is something other
+     * than 'close' or 'error', then `false` is returned and no handlers
+     * are called.
+     *
+     * If the event is 'end', and has already been emitted, then the event
+     * is ignored. If the stream is in a paused or non-flowing state, then
+     * the event will be deferred until data flow resumes. If the stream is
+     * async, then handlers will be called on the next tick rather than
+     * immediately.
+     *
+     * If the event is 'close', and 'end' has not yet been emitted, then
+     * the event will be deferred until after 'end' is emitted.
+     *
+     * If the event is 'error', and an AbortSignal was provided for the stream,
+     * and there are no listeners, then the event is ignored, matching the
+     * behavior of node core streams in the presence of an AbortSignal.
+     *
+     * If the event is 'finish' or 'prefinish', then all listeners will be
+     * removed after emitting the event, to prevent double-firing.
+     */
+    emit(ev, ...args) {
+        const data = args[0];
+        // error and close are only events allowed after calling destroy()
+        if (ev !== 'error' &&
+            ev !== 'close' &&
+            ev !== DESTROYED &&
+            this[DESTROYED]) {
+            return false;
+        }
+        else if (ev === 'data') {
+            return !this[OBJECTMODE] && !data
+                ? false
+                : this[ASYNC]
+                    ? (defer(() => this[EMITDATA](data)), true)
+                    : this[EMITDATA](data);
+        }
+        else if (ev === 'end') {
+            return this[EMITEND]();
+        }
+        else if (ev === 'close') {
+            this[CLOSED] = true;
+            // don't emit close before 'end' and 'finish'
+            if (!this[EMITTED_END] && !this[DESTROYED])
+                return false;
+            const ret = super.emit('close');
+            this.removeAllListeners('close');
+            return ret;
+        }
+        else if (ev === 'error') {
+            this[EMITTED_ERROR] = data;
+            super.emit(ERROR, data);
+            const ret = !this[SIGNAL] || this.listeners('error').length
+                ? super.emit('error', data)
+                : false;
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'resume') {
+            const ret = super.emit('resume');
+            this[MAYBE_EMIT_END]();
+            return ret;
+        }
+        else if (ev === 'finish' || ev === 'prefinish') {
+            const ret = super.emit(ev);
+            this.removeAllListeners(ev);
+            return ret;
+        }
+        // Some other unknown event
+        const ret = super.emit(ev, ...args);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITDATA](data) {
+        for (const p of this[PIPES]) {
+            if (p.dest.write(data) === false)
+                this.pause();
+        }
+        const ret = this[DISCARDED] ? false : super.emit('data', data);
+        this[MAYBE_EMIT_END]();
+        return ret;
+    }
+    [EMITEND]() {
+        if (this[EMITTED_END])
+            return false;
+        this[EMITTED_END] = true;
+        this.readable = false;
+        return this[ASYNC]
+            ? (defer(() => this[EMITEND2]()), true)
+            : this[EMITEND2]();
+    }
+    [EMITEND2]() {
+        if (this[DECODER]) {
+            const data = this[DECODER].end();
+            if (data) {
+                for (const p of this[PIPES]) {
+                    p.dest.write(data);
+                }
+                if (!this[DISCARDED])
+                    super.emit('data', data);
+            }
+        }
+        for (const p of this[PIPES]) {
+            p.end();
+        }
+        const ret = super.emit('end');
+        this.removeAllListeners('end');
+        return ret;
+    }
+    /**
+     * Return a Promise that resolves to an array of all emitted data once
+     * the stream ends.
+     */
+    async collect() {
+        const buf = Object.assign([], {
+            dataLength: 0,
+        });
+        if (!this[OBJECTMODE])
+            buf.dataLength = 0;
+        // set the promise first, in case an error is raised
+        // by triggering the flow here.
+        const p = this.promise();
+        this.on('data', c => {
+            buf.push(c);
+            if (!this[OBJECTMODE])
+                buf.dataLength += c.length;
+        });
+        await p;
+        return buf;
+    }
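+    /* Usage sketch (illustrative only): collect() resolves once the stream
+     * ends, yielding every emitted chunk plus a dataLength total on
+     * non-objectMode streams:
+     *
+     *   import { Minipass } from 'minipass'
+     *   const mp = new Minipass({ encoding: 'utf8' })
+     *   mp.write('foo')
+     *   mp.end('bar')
+     *   const chunks = await mp.collect() // ['foo', 'bar']
+     *   chunks.dataLength                 // 6
+     */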
+    /**
+     * Return a Promise that resolves to the concatenation of all emitted data
+     * once the stream ends.
+     *
+     * Not allowed on objectMode streams.
+     */
+    async concat() {
+        if (this[OBJECTMODE]) {
+            throw new Error('cannot concat in objectMode');
+        }
+        const buf = await this.collect();
+        return (this[ENCODING]
+            ? buf.join('')
+            : Buffer.concat(buf, buf.dataLength));
+    }
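+    /* Usage sketch (illustrative only): concat() joins the collected
+     * chunks, as a string when an encoding is set, otherwise as a single
+     * Buffer; objectMode streams reject:
+     *
+     *   import { Minipass } from 'minipass'
+     *   const text = new Minipass({ encoding: 'utf8' })
+     *   text.end('hello')
+     *   await text.concat() // 'hello'
+     *
+     *   const objs = new Minipass({ objectMode: true })
+     *   objs.end({ a: 1 })
+     *   await objs.concat().catch(er => er.message) // 'cannot concat in objectMode'
+     */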
+    /**
+     * Return a void Promise that resolves once the stream ends.
+     */
+    async promise() {
+        return new Promise((resolve, reject) => {
+            this.on(DESTROYED, () => reject(new Error('stream destroyed')));
+            this.on('error', er => reject(er));
+            this.on('end', () => resolve());
+        });
+    }
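+    /* Usage sketch (illustrative only): promise() suits cases where the
+     * data goes elsewhere and only completion matters; `dest` and
+     * `someData` below are hypothetical:
+     *
+     *   import { Minipass } from 'minipass'
+     *   const mp = new Minipass()
+     *   mp.pipe(dest)      // `dest` is a hypothetical writable destination
+     *   mp.end(someData)   // `someData` is hypothetical input
+     *   await mp.promise() // resolves on 'end'; rejects on 'error' or destroy
+     */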
+    /**
+     * Asynchronous `for await of` iteration.
+     *
+     * This will continue emitting all chunks until the stream terminates.
+     */
+    [Symbol.asyncIterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = async () => {
+            this.pause();
+            stopped = true;
+            return { value: undefined, done: true };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const res = this.read();
+            if (res !== null)
+                return Promise.resolve({ done: false, value: res });
+            if (this[EOF])
+                return stop();
+            let resolve;
+            let reject;
+            const onerr = (er) => {
+                this.off('data', ondata);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                reject(er);
+            };
+            const ondata = (value) => {
+                this.off('error', onerr);
+                this.off('end', onend);
+                this.off(DESTROYED, ondestroy);
+                this.pause();
+                resolve({ value, done: !!this[EOF] });
+            };
+            const onend = () => {
+                this.off('error', onerr);
+                this.off('data', ondata);
+                this.off(DESTROYED, ondestroy);
+                stop();
+                resolve({ done: true, value: undefined });
+            };
+            const ondestroy = () => onerr(new Error('stream destroyed'));
+            return new Promise((res, rej) => {
+                reject = rej;
+                resolve = res;
+                this.once(DESTROYED, ondestroy);
+                this.once('error', onerr);
+                this.once('end', onend);
+                this.once('data', ondata);
+            });
+        };
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.asyncIterator]() {
+                return this;
+            },
+        };
+    }
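+    /* Usage sketch (illustrative only): async iteration consumes chunks
+     * until 'end', 'error', or destruction terminates the loop:
+     *
+     *   import { Minipass } from 'minipass'
+     *   const mp = new Minipass({ encoding: 'utf8' })
+     *   mp.end('hello')
+     *   for await (const chunk of mp) console.log(chunk) // 'hello'
+     */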
+    /**
+     * Synchronous `for of` iteration.
+     *
+     * The iteration will terminate when the internal buffer runs out, even
+     * if the stream has not yet terminated.
+     */
+    [Symbol.iterator]() {
+        // set this up front, in case the consumer doesn't call next()
+        // right away.
+        this[DISCARDED] = false;
+        let stopped = false;
+        const stop = () => {
+            this.pause();
+            this.off(ERROR, stop);
+            this.off(DESTROYED, stop);
+            this.off('end', stop);
+            stopped = true;
+            return { done: true, value: undefined };
+        };
+        const next = () => {
+            if (stopped)
+                return stop();
+            const value = this.read();
+            return value === null ? stop() : { done: false, value };
+        };
+        this.once('end', stop);
+        this.once(ERROR, stop);
+        this.once(DESTROYED, stop);
+        return {
+            next,
+            throw: stop,
+            return: stop,
+            [Symbol.iterator]() {
+                return this;
+            },
+        };
+    }
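+    /* Usage sketch (illustrative only): sync iteration only drains what is
+     * already buffered, stopping at the first null read() even if more data
+     * could arrive later. Assuming read() concatenates the buffered chunks,
+     * this yields a single drained read:
+     *
+     *   import { Minipass } from 'minipass'
+     *   const mp = new Minipass({ encoding: 'utf8' })
+     *   mp.write('one')
+     *   mp.write('two')
+     *   for (const chunk of mp) console.log(chunk) // 'onetwo'
+     */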
+    /**
+     * Destroy a stream, preventing it from being used for any further purpose.
+     *
+     * If the stream has a `close()` method, then it will be called on
+     * destruction.
+     *
+     * After destruction, any attempt to write data, read data, or emit most
+     * events will be ignored.
+     *
+     * If an error argument is provided, then it will be emitted in an
+     * 'error' event.
+     */
+    destroy(er) {
+        if (this[DESTROYED]) {
+            if (er)
+                this.emit('error', er);
+            else
+                this.emit(DESTROYED);
+            return this;
+        }
+        this[DESTROYED] = true;
+        this[DISCARDED] = true;
+        // throw away all buffered data; it's never coming out
+        this[BUFFER].length = 0;
+        this[BUFFERLENGTH] = 0;
+        const wc = this;
+        if (typeof wc.close === 'function' && !this[CLOSED])
+            wc.close();
+        if (er)
+            this.emit('error', er);
+        // if no error to emit, still reject pending promises
+        else
+            this.emit(DESTROYED);
+        return this;
+    }
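+    /* Usage sketch (illustrative only): destroy() discards buffered data
+     * and rejects pending promise() consumers:
+     *
+     *   import { Minipass } from 'minipass'
+     *   const mp = new Minipass()
+     *   const done = mp.promise().catch(er => er.message)
+     *   mp.destroy()
+     *   await done // 'stream destroyed'
+     */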
+    /**
+     * Alias for {@link isStream}
+     *
+     * Former export location, maintained for backwards compatibility.
+     *
+     * @deprecated
+     */
+    static get isStream() {
+        return isStream;
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/package.json b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/dist/mjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/package.json b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/package.json
new file mode 100644
index 0000000000000..6faaa247a5bc6
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/node_modules/minipass/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "minipass",
+  "version": "7.0.3",
+  "description": "minimal implementation of a PassThrough stream",
+  "main": "./dist/cjs/index.js",
+  "module": "./dist/mjs/index.js",
+  "types": "./dist/cjs/index.js",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/index.d.ts",
+        "default": "./dist/cjs/index.js"
+      }
+    },
+    "./package.json": "./package.json"
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "preprepare": "rm -rf dist",
+    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json && bash ./scripts/fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "c8 tap",
+    "snap": "c8 tap",
+    "format": "prettier --write . --loglevel warn",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "tap": {
+    "coverage": false,
+    "node-arg": [
+      "--enable-source-maps",
+      "--no-warnings",
+      "--loader",
+      "ts-node/esm"
+    ],
+    "ts": false
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "devDependencies": {
+    "@types/node": "^20.1.2",
+    "@types/tap": "^15.0.8",
+    "c8": "^7.13.0",
+    "prettier": "^2.6.2",
+    "tap": "^16.3.0",
+    "ts-node": "^10.9.1",
+    "typedoc": "^0.24.8",
+    "typescript": "^5.1.3",
+    "end-of-stream": "^1.4.0",
+    "node-abort-controller": "^3.1.1",
+    "sync-content": "^1.0.2",
+    "through2": "^2.0.3"
+  },
+  "repository": "https://github.com/isaacs/minipass",
+  "keywords": [
+    "passthrough",
+    "stream"
+  ],
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC",
+  "engines": {
+    "node": ">=16 || 14 >=14.17"
+  }
+}
diff --git a/node_modules/tuf-js/node_modules/cacache/package.json b/node_modules/tuf-js/node_modules/cacache/package.json
new file mode 100644
index 0000000000000..ab58cb8b7c50f
--- /dev/null
+++ b/node_modules/tuf-js/node_modules/cacache/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "cacache",
+  "version": "17.1.4",
+  "cache-version": {
+    "content": "2",
+    "index": "5"
+  },
+  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "coverage": "tap",
+    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
+    "lint": "eslint \"**/*.js\"",
+    "npmclilint": "npmcli-lint",
+    "lintfix": "npm run lint -- --fix",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "posttest": "npm run lint",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/npm/cacache.git"
+  },
+  "keywords": [
+    "cache",
+    "caching",
+    "content-addressable",
+    "sri",
+    "sri hash",
+    "subresource integrity",
+    "cache",
+    "storage",
+    "store",
+    "file store",
+    "filesystem",
+    "disk cache",
+    "disk storage"
+  ],
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/fs": "^3.1.0",
+    "fs-minipass": "^3.0.0",
+    "glob": "^10.2.2",
+    "lru-cache": "^7.7.1",
+    "minipass": "^7.0.3",
+    "minipass-collect": "^1.0.2",
+    "minipass-flush": "^1.0.5",
+    "minipass-pipeline": "^1.2.4",
+    "p-map": "^4.0.0",
+    "ssri": "^10.0.0",
+    "tar": "^6.1.11",
+    "unique-filename": "^3.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^4.0.0",
+    "@npmcli/template-oss": "4.18.0",
+    "tap": "^16.0.0"
+  },
+  "engines": {
+    "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "windowsCI": false,
+    "version": "4.18.0",
+    "publish": "true"
+  },
+  "author": "GitHub Inc.",
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/package-lock.json b/package-lock.json
index 805c6503cdc0c..11c73d2843e2f 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -96,7 +96,7 @@
         "@npmcli/run-script": "^6.0.2",
         "abbrev": "^2.0.0",
         "archy": "~1.0.0",
-        "cacache": "^17.1.4",
+        "cacache": "^18.0.0",
         "chalk": "^5.3.0",
         "ci-info": "^3.8.0",
         "cli-columns": "^4.0.0",
@@ -2482,6 +2482,28 @@
         "node": "^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/metavuln-calculator/node_modules/cacache": {
+      "version": "17.1.4",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
+      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
+      "dependencies": {
+        "@npmcli/fs": "^3.1.0",
+        "fs-minipass": "^3.0.0",
+        "glob": "^10.2.2",
+        "lru-cache": "^7.7.1",
+        "minipass": "^7.0.3",
+        "minipass-collect": "^1.0.2",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "p-map": "^4.0.0",
+        "ssri": "^10.0.0",
+        "tar": "^6.1.11",
+        "unique-filename": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/mock-globals": {
       "resolved": "mock-globals",
       "link": true
@@ -3576,15 +3598,15 @@
       }
     },
     "node_modules/cacache": {
-      "version": "17.1.4",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
-      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
+      "version": "18.0.0",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-18.0.0.tgz",
+      "integrity": "sha512-I7mVOPl3PUCeRub1U8YoGz2Lqv9WOBpobZ8RyWFXmReuILz+3OAyTa5oH3QPdtKZD7N0Yk00aLfzn0qvp8dZ1w==",
       "inBundle": true,
       "dependencies": {
         "@npmcli/fs": "^3.1.0",
         "fs-minipass": "^3.0.0",
         "glob": "^10.2.2",
-        "lru-cache": "^7.7.1",
+        "lru-cache": "^10.0.1",
         "minipass": "^7.0.3",
         "minipass-collect": "^1.0.2",
         "minipass-flush": "^1.0.5",
@@ -3595,7 +3617,16 @@
         "unique-filename": "^3.0.0"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
+    "node_modules/cacache/node_modules/lru-cache": {
+      "version": "10.0.1",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz",
+      "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==",
+      "inBundle": true,
+      "engines": {
+        "node": "14 || >=16.14"
       }
     },
     "node_modules/caching-transform": {
@@ -7982,6 +8013,29 @@
         "node": "^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/make-fetch-happen/node_modules/cacache": {
+      "version": "17.1.4",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
+      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
+      "inBundle": true,
+      "dependencies": {
+        "@npmcli/fs": "^3.1.0",
+        "fs-minipass": "^3.0.0",
+        "glob": "^10.2.2",
+        "lru-cache": "^7.7.1",
+        "minipass": "^7.0.3",
+        "minipass-collect": "^1.0.2",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "p-map": "^4.0.0",
+        "ssri": "^10.0.0",
+        "tar": "^6.1.11",
+        "unique-filename": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/map-obj": {
       "version": "4.3.0",
       "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz",
@@ -9399,6 +9453,84 @@
         "concat-map": "0.0.1"
       }
     },
+    "node_modules/node-gyp/node_modules/cacache": {
+      "version": "17.1.4",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
+      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
+      "inBundle": true,
+      "dependencies": {
+        "@npmcli/fs": "^3.1.0",
+        "fs-minipass": "^3.0.0",
+        "glob": "^10.2.2",
+        "lru-cache": "^7.7.1",
+        "minipass": "^7.0.3",
+        "minipass-collect": "^1.0.2",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "p-map": "^4.0.0",
+        "ssri": "^10.0.0",
+        "tar": "^6.1.11",
+        "unique-filename": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/node-gyp/node_modules/cacache/node_modules/brace-expansion": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz",
+      "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==",
+      "inBundle": true,
+      "dependencies": {
+        "balanced-match": "^1.0.0"
+      }
+    },
+    "node_modules/node-gyp/node_modules/cacache/node_modules/glob": {
+      "version": "10.3.3",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.3.tgz",
+      "integrity": "sha512-92vPiMb/iqpmEgsOoIDvTjc50wf9CCCvMzsi6W0JLPeUKE8TWP1a73PgqSrqy7iAZxaSD1YdzU7QZR5LF51MJw==",
+      "inBundle": true,
+      "dependencies": {
+        "foreground-child": "^3.1.0",
+        "jackspeak": "^2.0.3",
+        "minimatch": "^9.0.1",
+        "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0",
+        "path-scurry": "^1.10.1"
+      },
+      "bin": {
+        "glob": "dist/cjs/src/bin.js"
+      },
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/node-gyp/node_modules/cacache/node_modules/minimatch": {
+      "version": "9.0.3",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz",
+      "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==",
+      "inBundle": true,
+      "dependencies": {
+        "brace-expansion": "^2.0.1"
+      },
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/node-gyp/node_modules/cacache/node_modules/minipass": {
+      "version": "7.0.3",
+      "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz",
+      "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==",
+      "inBundle": true,
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      }
+    },
     "node_modules/node-gyp/node_modules/gauge": {
       "version": "4.0.4",
       "resolved": "https://registry.npmjs.org/gauge/-/gauge-4.0.4.tgz",
@@ -10322,6 +10454,29 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/pacote/node_modules/cacache": {
+      "version": "17.1.4",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
+      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
+      "inBundle": true,
+      "dependencies": {
+        "@npmcli/fs": "^3.1.0",
+        "fs-minipass": "^3.0.0",
+        "glob": "^10.2.2",
+        "lru-cache": "^7.7.1",
+        "minipass": "^7.0.3",
+        "minipass-collect": "^1.0.2",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "p-map": "^4.0.0",
+        "ssri": "^10.0.0",
+        "tar": "^6.1.11",
+        "unique-filename": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/parent-module": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
@@ -11670,6 +11825,38 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/sigstore/node_modules/cacache": {
+      "version": "17.1.4",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
+      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
+      "inBundle": true,
+      "dependencies": {
+        "@npmcli/fs": "^3.1.0",
+        "fs-minipass": "^3.0.0",
+        "glob": "^10.2.2",
+        "lru-cache": "^7.7.1",
+        "minipass": "^7.0.3",
+        "minipass-collect": "^1.0.2",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "p-map": "^4.0.0",
+        "ssri": "^10.0.0",
+        "tar": "^6.1.11",
+        "unique-filename": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/sigstore/node_modules/cacache/node_modules/minipass": {
+      "version": "7.0.3",
+      "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz",
+      "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==",
+      "inBundle": true,
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      }
+    },
     "node_modules/sigstore/node_modules/make-fetch-happen": {
       "version": "11.1.1",
       "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz",
@@ -14831,6 +15018,38 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/tuf-js/node_modules/cacache": {
+      "version": "17.1.4",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-17.1.4.tgz",
+      "integrity": "sha512-/aJwG2l3ZMJ1xNAnqbMpA40of9dj/pIH3QfiuQSqjfPJF747VR0J/bHn+/KdNnHKc6XQcWt/AfRSBft82W1d2A==",
+      "inBundle": true,
+      "dependencies": {
+        "@npmcli/fs": "^3.1.0",
+        "fs-minipass": "^3.0.0",
+        "glob": "^10.2.2",
+        "lru-cache": "^7.7.1",
+        "minipass": "^7.0.3",
+        "minipass-collect": "^1.0.2",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "p-map": "^4.0.0",
+        "ssri": "^10.0.0",
+        "tar": "^6.1.11",
+        "unique-filename": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/tuf-js/node_modules/cacache/node_modules/minipass": {
+      "version": "7.0.3",
+      "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.3.tgz",
+      "integrity": "sha512-LhbbwCfz3vsb12j/WkWQPZfKTsgqIe1Nf/ti1pKjYESGLHIVjWU96G9/ljLH4F9mWNVhlQOm0VySdAWzf05dpg==",
+      "inBundle": true,
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      }
+    },
     "node_modules/tuf-js/node_modules/make-fetch-happen": {
       "version": "11.1.1",
       "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz",
@@ -15833,7 +16052,7 @@
         "@npmcli/query": "^3.0.0",
         "@npmcli/run-script": "^6.0.0",
         "bin-links": "^4.0.1",
-        "cacache": "^17.1.4",
+        "cacache": "^18.0.0",
         "common-ancestor-path": "^1.0.1",
         "hosted-git-info": "^6.1.1",
         "json-parse-even-better-errors": "^3.0.0",
diff --git a/package.json b/package.json
index e16660472c2c1..e74281c6f093f 100644
--- a/package.json
+++ b/package.json
@@ -61,7 +61,7 @@
     "@npmcli/run-script": "^6.0.2",
     "abbrev": "^2.0.0",
     "archy": "~1.0.0",
-    "cacache": "^17.1.4",
+    "cacache": "^18.0.0",
     "chalk": "^5.3.0",
     "ci-info": "^3.8.0",
     "cli-columns": "^4.0.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 9238d88a17bf0..6ff0119d21f20 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -14,7 +14,7 @@
     "@npmcli/query": "^3.0.0",
     "@npmcli/run-script": "^6.0.0",
     "bin-links": "^4.0.1",
-    "cacache": "^17.1.4",
+    "cacache": "^18.0.0",
     "common-ancestor-path": "^1.0.1",
     "hosted-git-info": "^6.1.1",
     "json-parse-even-better-errors": "^3.0.0",