diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index 324dbb190ca34..4a2ff45175b42 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -524,6 +524,7 @@ graph LR;
   npm-->libnpmteam;
   npm-->libnpmversion;
   npm-->licensee;
+  npm-->lru-cache;
   npm-->make-fetch-happen;
   npm-->minimatch;
   npm-->minipass-pipeline;
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 71679c6fedcfd..e39385838321d 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -19,21 +19,16 @@
 !/@npmcli/
 /@npmcli/*
 !/@npmcli/agent
-!/@npmcli/agent/node_modules/
-/@npmcli/agent/node_modules/*
-!/@npmcli/agent/node_modules/lru-cache
 !/@npmcli/disparity-colors
 !/@npmcli/fs
 !/@npmcli/git
-!/@npmcli/git/node_modules/
-/@npmcli/git/node_modules/*
-!/@npmcli/git/node_modules/lru-cache
 !/@npmcli/installed-package-contents
 !/@npmcli/map-workspaces
 !/@npmcli/metavuln-calculator
 !/@npmcli/metavuln-calculator/node_modules/
 /@npmcli/metavuln-calculator/node_modules/*
 !/@npmcli/metavuln-calculator/node_modules/cacache
+!/@npmcli/metavuln-calculator/node_modules/lru-cache
 !/@npmcli/name-from-folder
 !/@npmcli/node-gyp
 !/@npmcli/package-json
@@ -75,9 +70,6 @@
 !/buffer
 !/builtins
 !/cacache
-!/cacache/node_modules/
-/cacache/node_modules/*
-!/cacache/node_modules/lru-cache
 !/chalk
 !/chownr
 !/ci-info
@@ -125,9 +117,6 @@
 !/has-unicode
 !/has
 !/hosted-git-info
-!/hosted-git-info/node_modules/
-/hosted-git-info/node_modules/*
-!/hosted-git-info/node_modules/lru-cache
 !/http-cache-semantics
 !/http-proxy-agent
 !/https-proxy-agent
@@ -144,6 +133,7 @@
 !/init-package-json/node_modules/
 /init-package-json/node_modules/*
 !/init-package-json/node_modules/hosted-git-info
+!/init-package-json/node_modules/lru-cache
 !/init-package-json/node_modules/npm-package-arg
 !/ip-regex
 !/ip
@@ -206,6 +196,7 @@
 !/node-gyp/node_modules/cacache/node_modules/minipass
 !/node-gyp/node_modules/gauge
 !/node-gyp/node_modules/glob
+!/node-gyp/node_modules/lru-cache
 !/node-gyp/node_modules/make-fetch-happen
 !/node-gyp/node_modules/minimatch
 !/node-gyp/node_modules/minipass
@@ -219,6 +210,7 @@
 !/normalize-package-data/node_modules/
 /normalize-package-data/node_modules/*
 !/normalize-package-data/node_modules/hosted-git-info
+!/normalize-package-data/node_modules/lru-cache
 !/npm-audit-report
 !/npm-bundled
 !/npm-install-checks
@@ -229,6 +221,7 @@
 !/npm-pick-manifest/node_modules/
 /npm-pick-manifest/node_modules/*
 !/npm-pick-manifest/node_modules/hosted-git-info
+!/npm-pick-manifest/node_modules/lru-cache
 !/npm-pick-manifest/node_modules/npm-package-arg
 !/npm-profile
 !/npm-registry-fetch
@@ -239,6 +232,7 @@
 !/npm-registry-fetch/node_modules/@npmcli/agent
 !/npm-registry-fetch/node_modules/cacache
 !/npm-registry-fetch/node_modules/hosted-git-info
+!/npm-registry-fetch/node_modules/lru-cache
 !/npm-registry-fetch/node_modules/make-fetch-happen
 !/npm-registry-fetch/node_modules/npm-package-arg
 !/npm-user-validate
@@ -253,14 +247,12 @@
 !/pacote/node_modules/@npmcli/git
 !/pacote/node_modules/cacache
 !/pacote/node_modules/hosted-git-info
+!/pacote/node_modules/lru-cache
 !/pacote/node_modules/npm-package-arg
 !/parse-conflict-json
 !/path-is-absolute
 !/path-key
 !/path-scurry
-!/path-scurry/node_modules/
-/path-scurry/node_modules/*
-!/path-scurry/node_modules/lru-cache
 !/postcss-selector-parser
 !/proc-log
 !/process
@@ -299,6 +291,7 @@
 !/sigstore/node_modules/cacache/node_modules/
 /sigstore/node_modules/cacache/node_modules/*
 !/sigstore/node_modules/cacache/node_modules/minipass
+!/sigstore/node_modules/lru-cache
 !/sigstore/node_modules/make-fetch-happen
 !/sigstore/node_modules/minipass
 !/smart-buffer
@@ -333,6 +326,7 @@
 !/tuf-js/node_modules/cacache/node_modules/
 /tuf-js/node_modules/cacache/node_modules/*
 !/tuf-js/node_modules/cacache/node_modules/minipass
+!/tuf-js/node_modules/lru-cache
 !/tuf-js/node_modules/make-fetch-happen
 !/tuf-js/node_modules/minipass
 !/unique-filename
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.js
deleted file mode 100644
index 02d76ec800a92..0000000000000
--- a/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.js
+++ /dev/null
@@ -1,1404 +0,0 @@
-"use strict";
-/**
- * @module LRUCache
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.LRUCache = void 0;
-const perf = typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function'
-    ? performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ? process : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function'
-        ? PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values. Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-        ? Uint8Array
-        : max <= Math.pow(2, 16)
-            ? Uint16Array
-            : max <= Math.pow(2, 32)
-                ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER
-                    ? 
ZeroArray - : null; -/* c8 ignore stop */ -class ZeroArray extends Array { - constructor(size) { - super(size); - this.fill(0); - } -} -class Stack { - heap; - length; - // private constructor - static #constructing = false; - static create(max) { - const HeapCls = getUintArray(max); - if (!HeapCls) - return []; - Stack.#constructing = true; - const s = new Stack(max, HeapCls); - Stack.#constructing = false; - return s; - } - constructor(max, HeapCls) { - /* c8 ignore start */ - if (!Stack.#constructing) { - throw new TypeError('instantiate Stack using Stack.create(n)'); - } - /* c8 ignore stop */ - this.heap = new HeapCls(max); - this.length = 0; - } - push(n) { - this.heap[this.length++] = n; - } - pop() { - return this.heap[--this.length]; - } -} -/** - * Default export, the thing you're using this module to get. - * - * All properties from the options object (with the exception of - * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as - * normal public members. (`max` and `maxBase` are read-only getters.) - * Changing any of these will alter the defaults for subsequent method calls, - * but is otherwise safe. - */ -class LRUCache { - // properties coming in from the options of these, only max and maxSize - // really *need* to be protected. The rest can be modified, as they just - // set defaults for various methods. - #max; - #maxSize; - #dispose; - #disposeAfter; - #fetchMethod; - /** - * {@link LRUCache.OptionsBase.ttl} - */ - ttl; - /** - * {@link LRUCache.OptionsBase.ttlResolution} - */ - ttlResolution; - /** - * {@link LRUCache.OptionsBase.ttlAutopurge} - */ - ttlAutopurge; - /** - * {@link LRUCache.OptionsBase.updateAgeOnGet} - */ - updateAgeOnGet; - /** - * {@link LRUCache.OptionsBase.updateAgeOnHas} - */ - updateAgeOnHas; - /** - * {@link LRUCache.OptionsBase.allowStale} - */ - allowStale; - /** - * {@link LRUCache.OptionsBase.noDisposeOnSet} - */ - noDisposeOnSet; - /** - * {@link LRUCache.OptionsBase.noUpdateTTL} - */ - noUpdateTTL; - /** - * {@link LRUCache.OptionsBase.maxEntrySize} - */ - maxEntrySize; - /** - * {@link LRUCache.OptionsBase.sizeCalculation} - */ - sizeCalculation; - /** - * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} - */ - noDeleteOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} - */ - noDeleteOnStaleGet; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} - */ - allowStaleOnFetchAbort; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} - */ - allowStaleOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.ignoreFetchAbort} - */ - ignoreFetchAbort; - // computed properties - #size; - #calculatedSize; - #keyMap; - #keyList; - #valList; - #next; - #prev; - #head; - #tail; - #free; - #disposed; - #sizes; - #starts; - #ttls; - #hasDispose; - #hasFetchMethod; - #hasDisposeAfter; - /** - * Do not call this method unless you need to inspect the - * inner workings of the cache. If anything returned by this - * object is modified in any way, strange breakage may occur. - * - * These fields are private for a reason! 
- * - * @internal - */ - static unsafeExposeInternals(c) { - return { - // properties - starts: c.#starts, - ttls: c.#ttls, - sizes: c.#sizes, - keyMap: c.#keyMap, - keyList: c.#keyList, - valList: c.#valList, - next: c.#next, - prev: c.#prev, - get head() { - return c.#head; - }, - get tail() { - return c.#tail; - }, - free: c.#free, - // methods - isBackgroundFetch: (p) => c.#isBackgroundFetch(p), - backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), - moveToTail: (index) => c.#moveToTail(index), - indexes: (options) => c.#indexes(options), - rindexes: (options) => c.#rindexes(options), - isStale: (index) => c.#isStale(index), - }; - } - // Protected read-only members - /** - * {@link LRUCache.OptionsBase.max} (read-only) - */ - get max() { - return this.#max; - } - /** - * {@link LRUCache.OptionsBase.maxSize} (read-only) - */ - get maxSize() { - return this.#maxSize; - } - /** - * The total computed size of items in the cache (read-only) - */ - get calculatedSize() { - return this.#calculatedSize; - } - /** - * The number of items stored in the cache (read-only) - */ - get size() { - return this.#size; - } - /** - * {@link LRUCache.OptionsBase.fetchMethod} (read-only) - */ - get fetchMethod() { - return this.#fetchMethod; - } - /** - * {@link LRUCache.OptionsBase.dispose} (read-only) - */ - get dispose() { - return this.#dispose; - } - /** - * {@link LRUCache.OptionsBase.disposeAfter} (read-only) - */ - get disposeAfter() { - return this.#disposeAfter; - } - constructor(options) { - const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; - if (max !== 0 && !isPosInt(max)) { - throw new TypeError('max option must be a nonnegative integer'); - } - const UintArray = max ? 
getUintArray(max) : Array; - if (!UintArray) { - throw new Error('invalid max value: ' + max); - } - this.#max = max; - this.#maxSize = maxSize; - this.maxEntrySize = maxEntrySize || this.#maxSize; - this.sizeCalculation = sizeCalculation; - if (this.sizeCalculation) { - if (!this.#maxSize && !this.maxEntrySize) { - throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation set to non-function'); - } - } - if (fetchMethod !== undefined && - typeof fetchMethod !== 'function') { - throw new TypeError('fetchMethod must be a function if specified'); - } - this.#fetchMethod = fetchMethod; - this.#hasFetchMethod = !!fetchMethod; - this.#keyMap = new Map(); - this.#keyList = new Array(max).fill(undefined); - this.#valList = new Array(max).fill(undefined); - this.#next = new UintArray(max); - this.#prev = new UintArray(max); - this.#head = 0; - this.#tail = 0; - this.#free = Stack.create(max); - this.#size = 0; - this.#calculatedSize = 0; - if (typeof dispose === 'function') { - this.#dispose = dispose; - } - if (typeof disposeAfter === 'function') { - this.#disposeAfter = disposeAfter; - this.#disposed = []; - } - else { - this.#disposeAfter = undefined; - this.#disposed = undefined; - } - this.#hasDispose = !!this.#dispose; - this.#hasDisposeAfter = !!this.#disposeAfter; - this.noDisposeOnSet = !!noDisposeOnSet; - this.noUpdateTTL = !!noUpdateTTL; - this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; - this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; - this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; - this.ignoreFetchAbort = !!ignoreFetchAbort; - // NB: maxEntrySize is set to maxSize if it's set - if (this.maxEntrySize !== 0) { - if (this.#maxSize !== 0) { - if (!isPosInt(this.#maxSize)) { - throw new TypeError('maxSize must be a positive integer if specified'); - } - } - if (!isPosInt(this.maxEntrySize)) { - throw new TypeError('maxEntrySize must be a positive integer if specified'); - } - this.#initializeSizeTracking(); - } - this.allowStale = !!allowStale; - this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; - this.updateAgeOnGet = !!updateAgeOnGet; - this.updateAgeOnHas = !!updateAgeOnHas; - this.ttlResolution = - isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution - : 1; - this.ttlAutopurge = !!ttlAutopurge; - this.ttl = ttl || 0; - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError('ttl must be a positive integer if specified'); - } - this.#initializeTTLTracking(); - } - // do not allow completely unbounded caches - if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { - throw new TypeError('At least one of max, maxSize, or ttl is required'); - } - if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { - const code = 'LRU_CACHE_UNBOUNDED'; - if (shouldWarn(code)) { - warned.add(code); - const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + - 'result in unbounded memory consumption.'; - emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); - } - } - } - /** - * Return the remaining TTL time for a given entry key - */ - getRemainingTTL(key) { - return this.#keyMap.has(key) ? Infinity : 0; - } - #initializeTTLTracking() { - const ttls = new ZeroArray(this.#max); - const starts = new ZeroArray(this.#max); - this.#ttls = ttls; - this.#starts = starts; - this.#setItemTTL = (index, ttl, start = perf.now()) => { - starts[index] = ttl !== 0 ? 
start : 0; - ttls[index] = ttl; - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.#isStale(index)) { - this.delete(this.#keyList[index]); - } - }, ttl + 1); - // unref() not supported on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - }; - this.#updateItemAge = index => { - starts[index] = ttls[index] !== 0 ? perf.now() : 0; - }; - this.#statusTTL = (status, index) => { - if (ttls[index]) { - const ttl = ttls[index]; - const start = starts[index]; - status.ttl = ttl; - status.start = start; - status.now = cachedNow || getNow(); - const age = status.now - start; - status.remainingTTL = ttl - age; - } - }; - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0; - const getNow = () => { - const n = perf.now(); - if (this.ttlResolution > 0) { - cachedNow = n; - const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); - // not available on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - return n; - }; - this.getRemainingTTL = key => { - const index = this.#keyMap.get(key); - if (index === undefined) { - return 0; - } - const ttl = ttls[index]; - const start = starts[index]; - if (ttl === 0 || start === 0) { - return Infinity; - } - const age = (cachedNow || getNow()) - start; - return ttl - age; - }; - this.#isStale = index => { - return (ttls[index] !== 0 && - starts[index] !== 0 && - (cachedNow || getNow()) - starts[index] > ttls[index]); - }; - } - // conditionally set private methods related to TTL - #updateItemAge = () => { }; - #statusTTL = () => { }; - #setItemTTL = () => { }; - /* c8 ignore stop */ - #isStale = () => false; - #initializeSizeTracking() { - const sizes = new ZeroArray(this.#max); - this.#calculatedSize = 0; - this.#sizes = sizes; - this.#removeItemSize = index => { - this.#calculatedSize -= sizes[index]; - sizes[index] = 0; - }; - this.#requireSize = (k, v, size, sizeCalculation) => { - // provisionally accept background fetches. - // actual value size will be checked when they return. - if (this.#isBackgroundFetch(v)) { - return 0; - } - if (!isPosInt(size)) { - if (sizeCalculation) { - if (typeof sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation must be a function'); - } - size = sizeCalculation(v, k); - if (!isPosInt(size)) { - throw new TypeError('sizeCalculation return invalid (expect positive integer)'); - } - } - else { - throw new TypeError('invalid size value (must be positive integer). 
' + - 'When maxSize or maxEntrySize is used, sizeCalculation ' + - 'or size must be set.'); - } - } - return size; - }; - this.#addItemSize = (index, size, status) => { - sizes[index] = size; - if (this.#maxSize) { - const maxSize = this.#maxSize - sizes[index]; - while (this.#calculatedSize > maxSize) { - this.#evict(true); - } - } - this.#calculatedSize += sizes[index]; - if (status) { - status.entrySize = size; - status.totalCalculatedSize = this.#calculatedSize; - } - }; - } - #removeItemSize = _i => { }; - #addItemSize = (_i, _s, _st) => { }; - #requireSize = (_k, _v, size, sizeCalculation) => { - if (size || sizeCalculation) { - throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); - } - return 0; - }; - *#indexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#tail; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#head) { - break; - } - else { - i = this.#prev[i]; - } - } - } - } - *#rindexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#head; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#tail) { - break; - } - else { - i = this.#next[i]; - } - } - } - } - #isValidIndex(index) { - return (index !== undefined && - this.#keyMap.get(this.#keyList[index]) === index); - } - /** - * Return a generator yielding `[key, value]` pairs, - * in order from most recently used to least recently used. - */ - *entries() { - for (const i of this.#indexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Inverse order version of {@link LRUCache.entries} - * - * Return a generator yielding `[key, value]` pairs, - * in order from least recently used to most recently used. - */ - *rentries() { - for (const i of this.#rindexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Return a generator yielding the keys in the cache, - * in order from most recently used to least recently used. - */ - *keys() { - for (const i of this.#indexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Inverse order version of {@link LRUCache.keys} - * - * Return a generator yielding the keys in the cache, - * in order from least recently used to most recently used. - */ - *rkeys() { - for (const i of this.#rindexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Return a generator yielding the values in the cache, - * in order from most recently used to least recently used. - */ - *values() { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Inverse order version of {@link LRUCache.values} - * - * Return a generator yielding the values in the cache, - * in order from least recently used to most recently used. 
- */ - *rvalues() { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Iterating over the cache itself yields the same results as - * {@link LRUCache.entries} - */ - [Symbol.iterator]() { - return this.entries(); - } - /** - * Find a value for which the supplied fn method returns a truthy value, - * similar to Array.find(). fn is called as fn(value, key, cache). - */ - find(fn, getOptions = {}) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - if (fn(value, this.#keyList[i], this)) { - return this.get(this.#keyList[i], getOptions); - } - } - } - /** - * Call the supplied function on each item in the cache, in order from - * most recently used to least recently used. fn is called as - * fn(value, key, cache). Does not update age or recenty of use. - * Does not iterate over stale values. - */ - forEach(fn, thisp = this) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * The same as {@link LRUCache.forEach} but items are iterated over in - * reverse order. (ie, less recently used items are iterated over first.) - */ - rforEach(fn, thisp = this) { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * Delete any stale entries. Returns true if anything was removed, - * false otherwise. - */ - purgeStale() { - let deleted = false; - for (const i of this.#rindexes({ allowStale: true })) { - if (this.#isStale(i)) { - this.delete(this.#keyList[i]); - deleted = true; - } - } - return deleted; - } - /** - * Return an array of [key, {@link LRUCache.Entry}] tuples which can be - * passed to cache.load() - */ - dump() { - const arr = []; - for (const i of this.#indexes({ allowStale: true })) { - const key = this.#keyList[i]; - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined || key === undefined) - continue; - const entry = { value }; - if (this.#ttls && this.#starts) { - entry.ttl = this.#ttls[i]; - // always dump the start relative to a portable timestamp - // it's ok for this to be a bit slow, it's a rare operation. - const age = perf.now() - this.#starts[i]; - entry.start = Math.floor(Date.now() - age); - } - if (this.#sizes) { - entry.size = this.#sizes[i]; - } - arr.unshift([key, entry]); - } - return arr; - } - /** - * Reset the cache and load in the items in entries in the order listed. - * Note that the shape of the resulting cache may be different if the - * same options are not used in both caches. - */ - load(arr) { - this.clear(); - for (const [key, entry] of arr) { - if (entry.start) { - // entry.start is a portable timestamp, but we may be using - // node's performance.now(), so calculate the offset, so that - // we get the intended remaining TTL, no matter how long it's - // been on ice. - // - // it's ok for this to be a bit slow, it's a rare operation. 
- const age = Date.now() - entry.start; - entry.start = perf.now() - age; - } - this.set(key, entry.value, entry); - } - } - /** - * Add a value to the cache. - * - * Note: if `undefined` is specified as a value, this is an alias for - * {@link LRUCache#delete} - */ - set(k, v, setOptions = {}) { - if (v === undefined) { - this.delete(k); - return this; - } - const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; - let { noUpdateTTL = this.noUpdateTTL } = setOptions; - const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); - // if the item doesn't fit, don't do anything - // NB: maxEntrySize set to maxSize by default - if (this.maxEntrySize && size > this.maxEntrySize) { - if (status) { - status.set = 'miss'; - status.maxEntrySizeExceeded = true; - } - // have to delete, in case something is there already. - this.delete(k); - return this; - } - let index = this.#size === 0 ? undefined : this.#keyMap.get(k); - if (index === undefined) { - // addition - index = (this.#size === 0 - ? this.#tail - : this.#free.length !== 0 - ? this.#free.pop() - : this.#size === this.#max - ? this.#evict(false) - : this.#size); - this.#keyList[index] = k; - this.#valList[index] = v; - this.#keyMap.set(k, index); - this.#next[this.#tail] = index; - this.#prev[index] = this.#tail; - this.#tail = index; - this.#size++; - this.#addItemSize(index, size, status); - if (status) - status.set = 'add'; - noUpdateTTL = false; - } - else { - // update - this.#moveToTail(index); - const oldVal = this.#valList[index]; - if (v !== oldVal) { - if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { - oldVal.__abortController.abort(new Error('replaced')); - const { __staleWhileFetching: s } = oldVal; - if (s !== undefined && !noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(s, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([s, k, 'set']); - } - } - } - else if (!noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(oldVal, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([oldVal, k, 'set']); - } - } - this.#removeItemSize(index); - this.#addItemSize(index, size, status); - this.#valList[index] = v; - if (status) { - status.set = 'replace'; - const oldValue = oldVal && this.#isBackgroundFetch(oldVal) - ? oldVal.__staleWhileFetching - : oldVal; - if (oldValue !== undefined) - status.oldValue = oldValue; - } - } - else if (status) { - status.set = 'update'; - } - } - if (ttl !== 0 && !this.#ttls) { - this.#initializeTTLTracking(); - } - if (this.#ttls) { - if (!noUpdateTTL) { - this.#setItemTTL(index, ttl, start); - } - if (status) - this.#statusTTL(status, index); - } - if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return this; - } - /** - * Evict the least recently used item, returning its value or - * `undefined` if cache is empty. 
- */ - pop() { - try { - while (this.#size) { - const val = this.#valList[this.#head]; - this.#evict(true); - if (this.#isBackgroundFetch(val)) { - if (val.__staleWhileFetching) { - return val.__staleWhileFetching; - } - } - else if (val !== undefined) { - return val; - } - } - } - finally { - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } - } - #evict(free) { - const head = this.#head; - const k = this.#keyList[head]; - const v = this.#valList[head]; - if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('evicted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'evict'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'evict']); - } - } - this.#removeItemSize(head); - // if we aren't about to use the index, then null these out - if (free) { - this.#keyList[head] = undefined; - this.#valList[head] = undefined; - this.#free.push(head); - } - if (this.#size === 1) { - this.#head = this.#tail = 0; - this.#free.length = 0; - } - else { - this.#head = this.#next[head]; - } - this.#keyMap.delete(k); - this.#size--; - return head; - } - /** - * Check if a key is in the cache, without updating the recency of use. - * Will return false if the item is stale, even though it is technically - * in the cache. - * - * Will not update item age unless - * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. - */ - has(k, hasOptions = {}) { - const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v) && - v.__staleWhileFetching === undefined) { - return false; - } - if (!this.#isStale(index)) { - if (updateAgeOnHas) { - this.#updateItemAge(index); - } - if (status) { - status.has = 'hit'; - this.#statusTTL(status, index); - } - return true; - } - else if (status) { - status.has = 'stale'; - this.#statusTTL(status, index); - } - } - else if (status) { - status.has = 'miss'; - } - return false; - } - /** - * Like {@link LRUCache#get} but doesn't update recency or delete stale - * items. - * - * Returns `undefined` if the item is stale, unless - * {@link LRUCache.OptionsBase.allowStale} is set. - */ - peek(k, peekOptions = {}) { - const { allowStale = this.allowStale } = peekOptions; - const index = this.#keyMap.get(k); - if (index !== undefined && - (allowStale || !this.#isStale(index))) { - const v = this.#valList[index]; - // either stale and allowed, or forcing a refresh of non-stale value - return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; - } - } - #backgroundFetch(k, index, options, context) { - const v = index === undefined ? undefined : this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - return v; - } - const ac = new AC(); - const { signal } = options; - // when/if our AC signals, then stop listening to theirs. 
- signal?.addEventListener('abort', () => ac.abort(signal.reason), { - signal: ac.signal, - }); - const fetchOpts = { - signal: ac.signal, - options, - context, - }; - const cb = (v, updateCache = false) => { - const { aborted } = ac.signal; - const ignoreAbort = options.ignoreFetchAbort && v !== undefined; - if (options.status) { - if (aborted && !updateCache) { - options.status.fetchAborted = true; - options.status.fetchError = ac.signal.reason; - if (ignoreAbort) - options.status.fetchAbortIgnored = true; - } - else { - options.status.fetchResolved = true; - } - } - if (aborted && !ignoreAbort && !updateCache) { - return fetchFail(ac.signal.reason); - } - // either we didn't abort, and are still here, or we did, and ignored - const bf = p; - if (this.#valList[index] === p) { - if (v === undefined) { - if (bf.__staleWhileFetching) { - this.#valList[index] = bf.__staleWhileFetching; - } - else { - this.delete(k); - } - } - else { - if (options.status) - options.status.fetchUpdated = true; - this.set(k, v, fetchOpts.options); - } - } - return v; - }; - const eb = (er) => { - if (options.status) { - options.status.fetchRejected = true; - options.status.fetchError = er; - } - return fetchFail(er); - }; - const fetchFail = (er) => { - const { aborted } = ac.signal; - const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; - const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; - const noDelete = allowStale || options.noDeleteOnFetchRejection; - const bf = p; - if (this.#valList[index] === p) { - // if we allow stale on fetch rejections, then we need to ensure that - // the stale value is not removed from the cache when the fetch fails. - const del = !noDelete || bf.__staleWhileFetching === undefined; - if (del) { - this.delete(k); - } - else if (!allowStaleAborted) { - // still replace the *promise* with the stale value, - // since we are done with the promise at this point. - // leave it untouched if we're still waiting for an - // aborted background fetch that hasn't yet returned. - this.#valList[index] = bf.__staleWhileFetching; - } - } - if (allowStale) { - if (options.status && bf.__staleWhileFetching !== undefined) { - options.status.returnedStale = true; - } - return bf.__staleWhileFetching; - } - else if (bf.__returned === bf) { - throw er; - } - }; - const pcall = (res, rej) => { - const fmp = this.#fetchMethod?.(k, v, fetchOpts); - if (fmp && fmp instanceof Promise) { - fmp.then(v => res(v === undefined ? undefined : v), rej); - } - // ignored, we go until we finish, regardless. - // defer check until we are actually aborting, - // so fetchMethod can override. - ac.signal.addEventListener('abort', () => { - if (!options.ignoreFetchAbort || - options.allowStaleOnFetchAbort) { - res(undefined); - // when it eventually resolves, update the cache. - if (options.allowStaleOnFetchAbort) { - res = v => cb(v, true); - } - } - }); - }; - if (options.status) - options.status.fetchDispatched = true; - const p = new Promise(pcall).then(cb, eb); - const bf = Object.assign(p, { - __abortController: ac, - __staleWhileFetching: v, - __returned: undefined, - }); - if (index === undefined) { - // internal, don't expose status. 
- this.set(k, bf, { ...fetchOpts.options, status: undefined }); - index = this.#keyMap.get(k); - } - else { - this.#valList[index] = bf; - } - return bf; - } - #isBackgroundFetch(p) { - if (!this.#hasFetchMethod) - return false; - const b = p; - return (!!b && - b instanceof Promise && - b.hasOwnProperty('__staleWhileFetching') && - b.__abortController instanceof AC); - } - async fetch(k, fetchOptions = {}) { - const { - // get options - allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, - // set options - ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, - // fetch exclusive options - noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; - if (!this.#hasFetchMethod) { - if (status) - status.fetch = 'get'; - return this.get(k, { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - status, - }); - } - const options = { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - ttl, - noDisposeOnSet, - size, - sizeCalculation, - noUpdateTTL, - noDeleteOnFetchRejection, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - status, - signal, - }; - let index = this.#keyMap.get(k); - if (index === undefined) { - if (status) - status.fetch = 'miss'; - const p = this.#backgroundFetch(k, index, options, context); - return (p.__returned = p); - } - else { - // in cache, maybe already fetching - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - const stale = allowStale && v.__staleWhileFetching !== undefined; - if (status) { - status.fetch = 'inflight'; - if (stale) - status.returnedStale = true; - } - return stale ? v.__staleWhileFetching : (v.__returned = v); - } - // if we force a refresh, that means do NOT serve the cached value, - // unless we are already in the process of refreshing the cache. - const isStale = this.#isStale(index); - if (!forceRefresh && !isStale) { - if (status) - status.fetch = 'hit'; - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - if (status) - this.#statusTTL(status, index); - return v; - } - // ok, it is stale or a forced refresh, and not already fetching. - // refresh the cache. - const p = this.#backgroundFetch(k, index, options, context); - const hasStale = p.__staleWhileFetching !== undefined; - const staleVal = hasStale && allowStale; - if (status) { - status.fetch = isStale ? 'stale' : 'refresh'; - if (staleVal && isStale) - status.returnedStale = true; - } - return staleVal ? p.__staleWhileFetching : (p.__returned = p); - } - } - /** - * Return a value from the cache. Will update the recency of the cache - * entry found. - * - * If the key is not found, get() will return `undefined`. 
- */ - get(k, getOptions = {}) { - const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const value = this.#valList[index]; - const fetching = this.#isBackgroundFetch(value); - if (status) - this.#statusTTL(status, index); - if (this.#isStale(index)) { - if (status) - status.get = 'stale'; - // delete only if not an in-flight background fetch - if (!fetching) { - if (!noDeleteOnStaleGet) { - this.delete(k); - } - if (status && allowStale) - status.returnedStale = true; - return allowStale ? value : undefined; - } - else { - if (status && - allowStale && - value.__staleWhileFetching !== undefined) { - status.returnedStale = true; - } - return allowStale ? value.__staleWhileFetching : undefined; - } - } - else { - if (status) - status.get = 'hit'; - // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching. - // If it's not stale, and fetching, AND has a __staleWhileFetching - // value, then that means the user fetched with {forceRefresh:true}, - // so it's safe to return that value. - if (fetching) { - return value.__staleWhileFetching; - } - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - return value; - } - } - else if (status) { - status.get = 'miss'; - } - } - #connect(p, n) { - this.#prev[n] = p; - this.#next[p] = n; - } - #moveToTail(index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.#tail) { - if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#connect(this.#prev[index], this.#next[index]); - } - this.#connect(this.#tail, index); - this.#tail = index; - } - } - /** - * Deletes a key out of the cache. - * Returns true if the key was deleted, false otherwise. - */ - delete(k) { - let deleted = false; - if (this.#size !== 0) { - const index = this.#keyMap.get(k); - if (index !== undefined) { - deleted = true; - if (this.#size === 1) { - this.clear(); - } - else { - this.#removeItemSize(index); - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'delete'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'delete']); - } - } - this.#keyMap.delete(k); - this.#keyList[index] = undefined; - this.#valList[index] = undefined; - if (index === this.#tail) { - this.#tail = this.#prev[index]; - } - else if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#next[this.#prev[index]] = this.#next[index]; - this.#prev[this.#next[index]] = this.#prev[index]; - } - this.#size--; - this.#free.push(index); - } - } - } - if (this.#hasDisposeAfter && this.#disposed?.length) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return deleted; - } - /** - * Clear the cache entirely, throwing away all values. 
- */ - clear() { - for (const index of this.#rindexes({ allowStale: true })) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else { - const k = this.#keyList[index]; - if (this.#hasDispose) { - this.#dispose?.(v, k, 'delete'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'delete']); - } - } - } - this.#keyMap.clear(); - this.#valList.fill(undefined); - this.#keyList.fill(undefined); - if (this.#ttls && this.#starts) { - this.#ttls.fill(0); - this.#starts.fill(0); - } - if (this.#sizes) { - this.#sizes.fill(0); - } - this.#head = 0; - this.#tail = 0; - this.#free.length = 0; - this.#calculatedSize = 0; - this.#size = 0; - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } -} -exports.LRUCache = LRUCache; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.min.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.min.js deleted file mode 100644 index 8d34a03041d25..0000000000000 --- a/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/index.min.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=R.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#W(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};exports.LRUCache=C; -//# sourceMappingURL=index.min.js.map diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/package.json b/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/package.json deleted file mode 100644 index 5bbefffbabee3..0000000000000 --- a/node_modules/@npmcli/git/node_modules/lru-cache/dist/cjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "commonjs" -} diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.js deleted file mode 100644 index 23b9754ad6c76..0000000000000 --- a/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.js +++ /dev/null @@ -1,1400 +0,0 @@ -/** - * @module LRUCache - */ -const perf = typeof performance === 'object' && - performance && - typeof performance.now === 'function' - ? performance - : Date; -const warned = new Set(); -/* c8 ignore start */ -const PROCESS = (typeof process === 'object' && !!process ? process : {}); -/* c8 ignore start */ -const emitWarning = (msg, type, code, fn) => { - typeof PROCESS.emitWarning === 'function' - ? PROCESS.emitWarning(msg, type, code, fn) - : console.error(`[${code}] ${type}: ${msg}`); -}; -let AC = globalThis.AbortController; -let AS = globalThis.AbortSignal; -/* c8 ignore start */ -if (typeof AC === 'undefined') { - //@ts-ignore - AS = class AbortSignal { - onabort; - _onabort = []; - reason; - aborted = false; - addEventListener(_, fn) { - this._onabort.push(fn); - } - }; - //@ts-ignore - AC = class AbortController { - constructor() { - warnACPolyfill(); - } - signal = new AS(); - abort(reason) { - if (this.signal.aborted) - return; - //@ts-ignore - this.signal.reason = reason; - //@ts-ignore - this.signal.aborted = true; - //@ts-ignore - for (const fn of this.signal._onabort) { - fn(reason); - } - this.signal.onabort?.(reason); - } - }; - let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; - const warnACPolyfill = () => { - if (!printACPolyfillWarning) - return; - printACPolyfillWarning = false; - emitWarning('AbortController is not defined. If using lru-cache in ' + - 'node 14, load an AbortController polyfill from the ' + - '`node-abort-controller` package. A minimal polyfill is ' + - 'provided for use by LRUCache.fetch(), but it should not be ' + - 'relied upon in other contexts (eg, passing it to other APIs that ' + - 'use AbortController/AbortSignal might have undesirable effects). 
' + - 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); - }; -} -/* c8 ignore stop */ -const shouldWarn = (code) => !warned.has(code); -const TYPE = Symbol('type'); -const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); -/* c8 ignore start */ -// This is a little bit ridiculous, tbh. -// The maximum array length is 2^32-1 or thereabouts on most JS impls. -// And well before that point, you're caching the entire world, I mean, -// that's ~32GB of just integers for the next/prev links, plus whatever -// else to hold that many keys and values. Just filling the memory with -// zeroes at init time is brutal when you get that big. -// But why not be complete? -// Maybe in the future, these limits will have expanded. -const getUintArray = (max) => !isPosInt(max) - ? null - : max <= Math.pow(2, 8) - ? Uint8Array - : max <= Math.pow(2, 16) - ? Uint16Array - : max <= Math.pow(2, 32) - ? Uint32Array - : max <= Number.MAX_SAFE_INTEGER - ? ZeroArray - : null; -/* c8 ignore stop */ -class ZeroArray extends Array { - constructor(size) { - super(size); - this.fill(0); - } -} -class Stack { - heap; - length; - // private constructor - static #constructing = false; - static create(max) { - const HeapCls = getUintArray(max); - if (!HeapCls) - return []; - Stack.#constructing = true; - const s = new Stack(max, HeapCls); - Stack.#constructing = false; - return s; - } - constructor(max, HeapCls) { - /* c8 ignore start */ - if (!Stack.#constructing) { - throw new TypeError('instantiate Stack using Stack.create(n)'); - } - /* c8 ignore stop */ - this.heap = new HeapCls(max); - this.length = 0; - } - push(n) { - this.heap[this.length++] = n; - } - pop() { - return this.heap[--this.length]; - } -} -/** - * Default export, the thing you're using this module to get. - * - * All properties from the options object (with the exception of - * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as - * normal public members. (`max` and `maxBase` are read-only getters.) - * Changing any of these will alter the defaults for subsequent method calls, - * but is otherwise safe. - */ -export class LRUCache { - // properties coming in from the options of these, only max and maxSize - // really *need* to be protected. The rest can be modified, as they just - // set defaults for various methods. 
- #max; - #maxSize; - #dispose; - #disposeAfter; - #fetchMethod; - /** - * {@link LRUCache.OptionsBase.ttl} - */ - ttl; - /** - * {@link LRUCache.OptionsBase.ttlResolution} - */ - ttlResolution; - /** - * {@link LRUCache.OptionsBase.ttlAutopurge} - */ - ttlAutopurge; - /** - * {@link LRUCache.OptionsBase.updateAgeOnGet} - */ - updateAgeOnGet; - /** - * {@link LRUCache.OptionsBase.updateAgeOnHas} - */ - updateAgeOnHas; - /** - * {@link LRUCache.OptionsBase.allowStale} - */ - allowStale; - /** - * {@link LRUCache.OptionsBase.noDisposeOnSet} - */ - noDisposeOnSet; - /** - * {@link LRUCache.OptionsBase.noUpdateTTL} - */ - noUpdateTTL; - /** - * {@link LRUCache.OptionsBase.maxEntrySize} - */ - maxEntrySize; - /** - * {@link LRUCache.OptionsBase.sizeCalculation} - */ - sizeCalculation; - /** - * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} - */ - noDeleteOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} - */ - noDeleteOnStaleGet; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} - */ - allowStaleOnFetchAbort; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} - */ - allowStaleOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.ignoreFetchAbort} - */ - ignoreFetchAbort; - // computed properties - #size; - #calculatedSize; - #keyMap; - #keyList; - #valList; - #next; - #prev; - #head; - #tail; - #free; - #disposed; - #sizes; - #starts; - #ttls; - #hasDispose; - #hasFetchMethod; - #hasDisposeAfter; - /** - * Do not call this method unless you need to inspect the - * inner workings of the cache. If anything returned by this - * object is modified in any way, strange breakage may occur. - * - * These fields are private for a reason! - * - * @internal - */ - static unsafeExposeInternals(c) { - return { - // properties - starts: c.#starts, - ttls: c.#ttls, - sizes: c.#sizes, - keyMap: c.#keyMap, - keyList: c.#keyList, - valList: c.#valList, - next: c.#next, - prev: c.#prev, - get head() { - return c.#head; - }, - get tail() { - return c.#tail; - }, - free: c.#free, - // methods - isBackgroundFetch: (p) => c.#isBackgroundFetch(p), - backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), - moveToTail: (index) => c.#moveToTail(index), - indexes: (options) => c.#indexes(options), - rindexes: (options) => c.#rindexes(options), - isStale: (index) => c.#isStale(index), - }; - } - // Protected read-only members - /** - * {@link LRUCache.OptionsBase.max} (read-only) - */ - get max() { - return this.#max; - } - /** - * {@link LRUCache.OptionsBase.maxSize} (read-only) - */ - get maxSize() { - return this.#maxSize; - } - /** - * The total computed size of items in the cache (read-only) - */ - get calculatedSize() { - return this.#calculatedSize; - } - /** - * The number of items stored in the cache (read-only) - */ - get size() { - return this.#size; - } - /** - * {@link LRUCache.OptionsBase.fetchMethod} (read-only) - */ - get fetchMethod() { - return this.#fetchMethod; - } - /** - * {@link LRUCache.OptionsBase.dispose} (read-only) - */ - get dispose() { - return this.#dispose; - } - /** - * {@link LRUCache.OptionsBase.disposeAfter} (read-only) - */ - get disposeAfter() { - return this.#disposeAfter; - } - constructor(options) { - const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, 
allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; - if (max !== 0 && !isPosInt(max)) { - throw new TypeError('max option must be a nonnegative integer'); - } - const UintArray = max ? getUintArray(max) : Array; - if (!UintArray) { - throw new Error('invalid max value: ' + max); - } - this.#max = max; - this.#maxSize = maxSize; - this.maxEntrySize = maxEntrySize || this.#maxSize; - this.sizeCalculation = sizeCalculation; - if (this.sizeCalculation) { - if (!this.#maxSize && !this.maxEntrySize) { - throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation set to non-function'); - } - } - if (fetchMethod !== undefined && - typeof fetchMethod !== 'function') { - throw new TypeError('fetchMethod must be a function if specified'); - } - this.#fetchMethod = fetchMethod; - this.#hasFetchMethod = !!fetchMethod; - this.#keyMap = new Map(); - this.#keyList = new Array(max).fill(undefined); - this.#valList = new Array(max).fill(undefined); - this.#next = new UintArray(max); - this.#prev = new UintArray(max); - this.#head = 0; - this.#tail = 0; - this.#free = Stack.create(max); - this.#size = 0; - this.#calculatedSize = 0; - if (typeof dispose === 'function') { - this.#dispose = dispose; - } - if (typeof disposeAfter === 'function') { - this.#disposeAfter = disposeAfter; - this.#disposed = []; - } - else { - this.#disposeAfter = undefined; - this.#disposed = undefined; - } - this.#hasDispose = !!this.#dispose; - this.#hasDisposeAfter = !!this.#disposeAfter; - this.noDisposeOnSet = !!noDisposeOnSet; - this.noUpdateTTL = !!noUpdateTTL; - this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; - this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; - this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; - this.ignoreFetchAbort = !!ignoreFetchAbort; - // NB: maxEntrySize is set to maxSize if it's set - if (this.maxEntrySize !== 0) { - if (this.#maxSize !== 0) { - if (!isPosInt(this.#maxSize)) { - throw new TypeError('maxSize must be a positive integer if specified'); - } - } - if (!isPosInt(this.maxEntrySize)) { - throw new TypeError('maxEntrySize must be a positive integer if specified'); - } - this.#initializeSizeTracking(); - } - this.allowStale = !!allowStale; - this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; - this.updateAgeOnGet = !!updateAgeOnGet; - this.updateAgeOnHas = !!updateAgeOnHas; - this.ttlResolution = - isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution - : 1; - this.ttlAutopurge = !!ttlAutopurge; - this.ttl = ttl || 0; - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError('ttl must be a positive integer if specified'); - } - this.#initializeTTLTracking(); - } - // do not allow completely unbounded caches - if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { - throw new TypeError('At least one of max, maxSize, or ttl is required'); - } - if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { - const code = 'LRU_CACHE_UNBOUNDED'; - if (shouldWarn(code)) { - warned.add(code); - const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + - 'result in unbounded memory consumption.'; - emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); - } - } - } - /** - * Return the remaining TTL time for a given entry key - */ - getRemainingTTL(key) { - return this.#keyMap.has(key) ? 
Infinity : 0; - } - #initializeTTLTracking() { - const ttls = new ZeroArray(this.#max); - const starts = new ZeroArray(this.#max); - this.#ttls = ttls; - this.#starts = starts; - this.#setItemTTL = (index, ttl, start = perf.now()) => { - starts[index] = ttl !== 0 ? start : 0; - ttls[index] = ttl; - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.#isStale(index)) { - this.delete(this.#keyList[index]); - } - }, ttl + 1); - // unref() not supported on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - }; - this.#updateItemAge = index => { - starts[index] = ttls[index] !== 0 ? perf.now() : 0; - }; - this.#statusTTL = (status, index) => { - if (ttls[index]) { - const ttl = ttls[index]; - const start = starts[index]; - status.ttl = ttl; - status.start = start; - status.now = cachedNow || getNow(); - const age = status.now - start; - status.remainingTTL = ttl - age; - } - }; - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0; - const getNow = () => { - const n = perf.now(); - if (this.ttlResolution > 0) { - cachedNow = n; - const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); - // not available on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - return n; - }; - this.getRemainingTTL = key => { - const index = this.#keyMap.get(key); - if (index === undefined) { - return 0; - } - const ttl = ttls[index]; - const start = starts[index]; - if (ttl === 0 || start === 0) { - return Infinity; - } - const age = (cachedNow || getNow()) - start; - return ttl - age; - }; - this.#isStale = index => { - return (ttls[index] !== 0 && - starts[index] !== 0 && - (cachedNow || getNow()) - starts[index] > ttls[index]); - }; - } - // conditionally set private methods related to TTL - #updateItemAge = () => { }; - #statusTTL = () => { }; - #setItemTTL = () => { }; - /* c8 ignore stop */ - #isStale = () => false; - #initializeSizeTracking() { - const sizes = new ZeroArray(this.#max); - this.#calculatedSize = 0; - this.#sizes = sizes; - this.#removeItemSize = index => { - this.#calculatedSize -= sizes[index]; - sizes[index] = 0; - }; - this.#requireSize = (k, v, size, sizeCalculation) => { - // provisionally accept background fetches. - // actual value size will be checked when they return. - if (this.#isBackgroundFetch(v)) { - return 0; - } - if (!isPosInt(size)) { - if (sizeCalculation) { - if (typeof sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation must be a function'); - } - size = sizeCalculation(v, k); - if (!isPosInt(size)) { - throw new TypeError('sizeCalculation return invalid (expect positive integer)'); - } - } - else { - throw new TypeError('invalid size value (must be positive integer). 
' + - 'When maxSize or maxEntrySize is used, sizeCalculation ' + - 'or size must be set.'); - } - } - return size; - }; - this.#addItemSize = (index, size, status) => { - sizes[index] = size; - if (this.#maxSize) { - const maxSize = this.#maxSize - sizes[index]; - while (this.#calculatedSize > maxSize) { - this.#evict(true); - } - } - this.#calculatedSize += sizes[index]; - if (status) { - status.entrySize = size; - status.totalCalculatedSize = this.#calculatedSize; - } - }; - } - #removeItemSize = _i => { }; - #addItemSize = (_i, _s, _st) => { }; - #requireSize = (_k, _v, size, sizeCalculation) => { - if (size || sizeCalculation) { - throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); - } - return 0; - }; - *#indexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#tail; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#head) { - break; - } - else { - i = this.#prev[i]; - } - } - } - } - *#rindexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#head; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#tail) { - break; - } - else { - i = this.#next[i]; - } - } - } - } - #isValidIndex(index) { - return (index !== undefined && - this.#keyMap.get(this.#keyList[index]) === index); - } - /** - * Return a generator yielding `[key, value]` pairs, - * in order from most recently used to least recently used. - */ - *entries() { - for (const i of this.#indexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Inverse order version of {@link LRUCache.entries} - * - * Return a generator yielding `[key, value]` pairs, - * in order from least recently used to most recently used. - */ - *rentries() { - for (const i of this.#rindexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Return a generator yielding the keys in the cache, - * in order from most recently used to least recently used. - */ - *keys() { - for (const i of this.#indexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Inverse order version of {@link LRUCache.keys} - * - * Return a generator yielding the keys in the cache, - * in order from least recently used to most recently used. - */ - *rkeys() { - for (const i of this.#rindexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Return a generator yielding the values in the cache, - * in order from most recently used to least recently used. - */ - *values() { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Inverse order version of {@link LRUCache.values} - * - * Return a generator yielding the values in the cache, - * in order from least recently used to most recently used. 
- */ - *rvalues() { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Iterating over the cache itself yields the same results as - * {@link LRUCache.entries} - */ - [Symbol.iterator]() { - return this.entries(); - } - /** - * Find a value for which the supplied fn method returns a truthy value, - * similar to Array.find(). fn is called as fn(value, key, cache). - */ - find(fn, getOptions = {}) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - if (fn(value, this.#keyList[i], this)) { - return this.get(this.#keyList[i], getOptions); - } - } - } - /** - * Call the supplied function on each item in the cache, in order from - * most recently used to least recently used. fn is called as - * fn(value, key, cache). Does not update age or recenty of use. - * Does not iterate over stale values. - */ - forEach(fn, thisp = this) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * The same as {@link LRUCache.forEach} but items are iterated over in - * reverse order. (ie, less recently used items are iterated over first.) - */ - rforEach(fn, thisp = this) { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * Delete any stale entries. Returns true if anything was removed, - * false otherwise. - */ - purgeStale() { - let deleted = false; - for (const i of this.#rindexes({ allowStale: true })) { - if (this.#isStale(i)) { - this.delete(this.#keyList[i]); - deleted = true; - } - } - return deleted; - } - /** - * Return an array of [key, {@link LRUCache.Entry}] tuples which can be - * passed to cache.load() - */ - dump() { - const arr = []; - for (const i of this.#indexes({ allowStale: true })) { - const key = this.#keyList[i]; - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined || key === undefined) - continue; - const entry = { value }; - if (this.#ttls && this.#starts) { - entry.ttl = this.#ttls[i]; - // always dump the start relative to a portable timestamp - // it's ok for this to be a bit slow, it's a rare operation. - const age = perf.now() - this.#starts[i]; - entry.start = Math.floor(Date.now() - age); - } - if (this.#sizes) { - entry.size = this.#sizes[i]; - } - arr.unshift([key, entry]); - } - return arr; - } - /** - * Reset the cache and load in the items in entries in the order listed. - * Note that the shape of the resulting cache may be different if the - * same options are not used in both caches. - */ - load(arr) { - this.clear(); - for (const [key, entry] of arr) { - if (entry.start) { - // entry.start is a portable timestamp, but we may be using - // node's performance.now(), so calculate the offset, so that - // we get the intended remaining TTL, no matter how long it's - // been on ice. - // - // it's ok for this to be a bit slow, it's a rare operation. 
- const age = Date.now() - entry.start; - entry.start = perf.now() - age; - } - this.set(key, entry.value, entry); - } - } - /** - * Add a value to the cache. - * - * Note: if `undefined` is specified as a value, this is an alias for - * {@link LRUCache#delete} - */ - set(k, v, setOptions = {}) { - if (v === undefined) { - this.delete(k); - return this; - } - const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; - let { noUpdateTTL = this.noUpdateTTL } = setOptions; - const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); - // if the item doesn't fit, don't do anything - // NB: maxEntrySize set to maxSize by default - if (this.maxEntrySize && size > this.maxEntrySize) { - if (status) { - status.set = 'miss'; - status.maxEntrySizeExceeded = true; - } - // have to delete, in case something is there already. - this.delete(k); - return this; - } - let index = this.#size === 0 ? undefined : this.#keyMap.get(k); - if (index === undefined) { - // addition - index = (this.#size === 0 - ? this.#tail - : this.#free.length !== 0 - ? this.#free.pop() - : this.#size === this.#max - ? this.#evict(false) - : this.#size); - this.#keyList[index] = k; - this.#valList[index] = v; - this.#keyMap.set(k, index); - this.#next[this.#tail] = index; - this.#prev[index] = this.#tail; - this.#tail = index; - this.#size++; - this.#addItemSize(index, size, status); - if (status) - status.set = 'add'; - noUpdateTTL = false; - } - else { - // update - this.#moveToTail(index); - const oldVal = this.#valList[index]; - if (v !== oldVal) { - if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { - oldVal.__abortController.abort(new Error('replaced')); - const { __staleWhileFetching: s } = oldVal; - if (s !== undefined && !noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(s, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([s, k, 'set']); - } - } - } - else if (!noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(oldVal, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([oldVal, k, 'set']); - } - } - this.#removeItemSize(index); - this.#addItemSize(index, size, status); - this.#valList[index] = v; - if (status) { - status.set = 'replace'; - const oldValue = oldVal && this.#isBackgroundFetch(oldVal) - ? oldVal.__staleWhileFetching - : oldVal; - if (oldValue !== undefined) - status.oldValue = oldValue; - } - } - else if (status) { - status.set = 'update'; - } - } - if (ttl !== 0 && !this.#ttls) { - this.#initializeTTLTracking(); - } - if (this.#ttls) { - if (!noUpdateTTL) { - this.#setItemTTL(index, ttl, start); - } - if (status) - this.#statusTTL(status, index); - } - if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return this; - } - /** - * Evict the least recently used item, returning its value or - * `undefined` if cache is empty. 
- */ - pop() { - try { - while (this.#size) { - const val = this.#valList[this.#head]; - this.#evict(true); - if (this.#isBackgroundFetch(val)) { - if (val.__staleWhileFetching) { - return val.__staleWhileFetching; - } - } - else if (val !== undefined) { - return val; - } - } - } - finally { - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } - } - #evict(free) { - const head = this.#head; - const k = this.#keyList[head]; - const v = this.#valList[head]; - if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('evicted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'evict'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'evict']); - } - } - this.#removeItemSize(head); - // if we aren't about to use the index, then null these out - if (free) { - this.#keyList[head] = undefined; - this.#valList[head] = undefined; - this.#free.push(head); - } - if (this.#size === 1) { - this.#head = this.#tail = 0; - this.#free.length = 0; - } - else { - this.#head = this.#next[head]; - } - this.#keyMap.delete(k); - this.#size--; - return head; - } - /** - * Check if a key is in the cache, without updating the recency of use. - * Will return false if the item is stale, even though it is technically - * in the cache. - * - * Will not update item age unless - * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. - */ - has(k, hasOptions = {}) { - const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v) && - v.__staleWhileFetching === undefined) { - return false; - } - if (!this.#isStale(index)) { - if (updateAgeOnHas) { - this.#updateItemAge(index); - } - if (status) { - status.has = 'hit'; - this.#statusTTL(status, index); - } - return true; - } - else if (status) { - status.has = 'stale'; - this.#statusTTL(status, index); - } - } - else if (status) { - status.has = 'miss'; - } - return false; - } - /** - * Like {@link LRUCache#get} but doesn't update recency or delete stale - * items. - * - * Returns `undefined` if the item is stale, unless - * {@link LRUCache.OptionsBase.allowStale} is set. - */ - peek(k, peekOptions = {}) { - const { allowStale = this.allowStale } = peekOptions; - const index = this.#keyMap.get(k); - if (index !== undefined && - (allowStale || !this.#isStale(index))) { - const v = this.#valList[index]; - // either stale and allowed, or forcing a refresh of non-stale value - return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; - } - } - #backgroundFetch(k, index, options, context) { - const v = index === undefined ? undefined : this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - return v; - } - const ac = new AC(); - const { signal } = options; - // when/if our AC signals, then stop listening to theirs. 
- signal?.addEventListener('abort', () => ac.abort(signal.reason), { - signal: ac.signal, - }); - const fetchOpts = { - signal: ac.signal, - options, - context, - }; - const cb = (v, updateCache = false) => { - const { aborted } = ac.signal; - const ignoreAbort = options.ignoreFetchAbort && v !== undefined; - if (options.status) { - if (aborted && !updateCache) { - options.status.fetchAborted = true; - options.status.fetchError = ac.signal.reason; - if (ignoreAbort) - options.status.fetchAbortIgnored = true; - } - else { - options.status.fetchResolved = true; - } - } - if (aborted && !ignoreAbort && !updateCache) { - return fetchFail(ac.signal.reason); - } - // either we didn't abort, and are still here, or we did, and ignored - const bf = p; - if (this.#valList[index] === p) { - if (v === undefined) { - if (bf.__staleWhileFetching) { - this.#valList[index] = bf.__staleWhileFetching; - } - else { - this.delete(k); - } - } - else { - if (options.status) - options.status.fetchUpdated = true; - this.set(k, v, fetchOpts.options); - } - } - return v; - }; - const eb = (er) => { - if (options.status) { - options.status.fetchRejected = true; - options.status.fetchError = er; - } - return fetchFail(er); - }; - const fetchFail = (er) => { - const { aborted } = ac.signal; - const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; - const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; - const noDelete = allowStale || options.noDeleteOnFetchRejection; - const bf = p; - if (this.#valList[index] === p) { - // if we allow stale on fetch rejections, then we need to ensure that - // the stale value is not removed from the cache when the fetch fails. - const del = !noDelete || bf.__staleWhileFetching === undefined; - if (del) { - this.delete(k); - } - else if (!allowStaleAborted) { - // still replace the *promise* with the stale value, - // since we are done with the promise at this point. - // leave it untouched if we're still waiting for an - // aborted background fetch that hasn't yet returned. - this.#valList[index] = bf.__staleWhileFetching; - } - } - if (allowStale) { - if (options.status && bf.__staleWhileFetching !== undefined) { - options.status.returnedStale = true; - } - return bf.__staleWhileFetching; - } - else if (bf.__returned === bf) { - throw er; - } - }; - const pcall = (res, rej) => { - const fmp = this.#fetchMethod?.(k, v, fetchOpts); - if (fmp && fmp instanceof Promise) { - fmp.then(v => res(v === undefined ? undefined : v), rej); - } - // ignored, we go until we finish, regardless. - // defer check until we are actually aborting, - // so fetchMethod can override. - ac.signal.addEventListener('abort', () => { - if (!options.ignoreFetchAbort || - options.allowStaleOnFetchAbort) { - res(undefined); - // when it eventually resolves, update the cache. - if (options.allowStaleOnFetchAbort) { - res = v => cb(v, true); - } - } - }); - }; - if (options.status) - options.status.fetchDispatched = true; - const p = new Promise(pcall).then(cb, eb); - const bf = Object.assign(p, { - __abortController: ac, - __staleWhileFetching: v, - __returned: undefined, - }); - if (index === undefined) { - // internal, don't expose status. 
- this.set(k, bf, { ...fetchOpts.options, status: undefined }); - index = this.#keyMap.get(k); - } - else { - this.#valList[index] = bf; - } - return bf; - } - #isBackgroundFetch(p) { - if (!this.#hasFetchMethod) - return false; - const b = p; - return (!!b && - b instanceof Promise && - b.hasOwnProperty('__staleWhileFetching') && - b.__abortController instanceof AC); - } - async fetch(k, fetchOptions = {}) { - const { - // get options - allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, - // set options - ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, - // fetch exclusive options - noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; - if (!this.#hasFetchMethod) { - if (status) - status.fetch = 'get'; - return this.get(k, { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - status, - }); - } - const options = { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - ttl, - noDisposeOnSet, - size, - sizeCalculation, - noUpdateTTL, - noDeleteOnFetchRejection, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - status, - signal, - }; - let index = this.#keyMap.get(k); - if (index === undefined) { - if (status) - status.fetch = 'miss'; - const p = this.#backgroundFetch(k, index, options, context); - return (p.__returned = p); - } - else { - // in cache, maybe already fetching - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - const stale = allowStale && v.__staleWhileFetching !== undefined; - if (status) { - status.fetch = 'inflight'; - if (stale) - status.returnedStale = true; - } - return stale ? v.__staleWhileFetching : (v.__returned = v); - } - // if we force a refresh, that means do NOT serve the cached value, - // unless we are already in the process of refreshing the cache. - const isStale = this.#isStale(index); - if (!forceRefresh && !isStale) { - if (status) - status.fetch = 'hit'; - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - if (status) - this.#statusTTL(status, index); - return v; - } - // ok, it is stale or a forced refresh, and not already fetching. - // refresh the cache. - const p = this.#backgroundFetch(k, index, options, context); - const hasStale = p.__staleWhileFetching !== undefined; - const staleVal = hasStale && allowStale; - if (status) { - status.fetch = isStale ? 'stale' : 'refresh'; - if (staleVal && isStale) - status.returnedStale = true; - } - return staleVal ? p.__staleWhileFetching : (p.__returned = p); - } - } - /** - * Return a value from the cache. Will update the recency of the cache - * entry found. - * - * If the key is not found, get() will return `undefined`. 
- */ - get(k, getOptions = {}) { - const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const value = this.#valList[index]; - const fetching = this.#isBackgroundFetch(value); - if (status) - this.#statusTTL(status, index); - if (this.#isStale(index)) { - if (status) - status.get = 'stale'; - // delete only if not an in-flight background fetch - if (!fetching) { - if (!noDeleteOnStaleGet) { - this.delete(k); - } - if (status && allowStale) - status.returnedStale = true; - return allowStale ? value : undefined; - } - else { - if (status && - allowStale && - value.__staleWhileFetching !== undefined) { - status.returnedStale = true; - } - return allowStale ? value.__staleWhileFetching : undefined; - } - } - else { - if (status) - status.get = 'hit'; - // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching. - // If it's not stale, and fetching, AND has a __staleWhileFetching - // value, then that means the user fetched with {forceRefresh:true}, - // so it's safe to return that value. - if (fetching) { - return value.__staleWhileFetching; - } - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - return value; - } - } - else if (status) { - status.get = 'miss'; - } - } - #connect(p, n) { - this.#prev[n] = p; - this.#next[p] = n; - } - #moveToTail(index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.#tail) { - if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#connect(this.#prev[index], this.#next[index]); - } - this.#connect(this.#tail, index); - this.#tail = index; - } - } - /** - * Deletes a key out of the cache. - * Returns true if the key was deleted, false otherwise. - */ - delete(k) { - let deleted = false; - if (this.#size !== 0) { - const index = this.#keyMap.get(k); - if (index !== undefined) { - deleted = true; - if (this.#size === 1) { - this.clear(); - } - else { - this.#removeItemSize(index); - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'delete'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'delete']); - } - } - this.#keyMap.delete(k); - this.#keyList[index] = undefined; - this.#valList[index] = undefined; - if (index === this.#tail) { - this.#tail = this.#prev[index]; - } - else if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#next[this.#prev[index]] = this.#next[index]; - this.#prev[this.#next[index]] = this.#prev[index]; - } - this.#size--; - this.#free.push(index); - } - } - } - if (this.#hasDisposeAfter && this.#disposed?.length) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return deleted; - } - /** - * Clear the cache entirely, throwing away all values. 
- */ - clear() { - for (const index of this.#rindexes({ allowStale: true })) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else { - const k = this.#keyList[index]; - if (this.#hasDispose) { - this.#dispose?.(v, k, 'delete'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'delete']); - } - } - } - this.#keyMap.clear(); - this.#valList.fill(undefined); - this.#keyList.fill(undefined); - if (this.#ttls && this.#starts) { - this.#ttls.fill(0); - this.#starts.fill(0); - } - if (this.#sizes) { - this.#sizes.fill(0); - } - this.#head = 0; - this.#tail = 0; - this.#free.length = 0; - this.#calculatedSize = 0; - this.#size = 0; - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.min.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.min.js deleted file mode 100644 index 5a16b3940d6df..0000000000000 --- a/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/index.min.js +++ /dev/null @@ -1,2 +0,0 @@ -var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var R=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=C.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,R))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#W=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#R(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#W=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#R(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#W(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#W(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#R(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#R(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};export{R as LRUCache}; -//# sourceMappingURL=index.min.js.map diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/package.json b/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/package.json deleted file mode 100644 index 3dbc1ca591c05..0000000000000 --- a/node_modules/@npmcli/git/node_modules/lru-cache/dist/mjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "module" -} diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/LICENSE similarity index 100% rename from node_modules/@npmcli/agent/node_modules/lru-cache/LICENSE rename to node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/LICENSE diff --git a/node_modules/lru-cache/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/index.js similarity index 100% rename from node_modules/lru-cache/index.js rename to node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/index.js diff --git a/node_modules/lru-cache/index.mjs b/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/index.mjs similarity index 100% rename from node_modules/lru-cache/index.mjs rename to node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/index.mjs diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/package.json similarity index 52% rename from node_modules/@npmcli/agent/node_modules/lru-cache/package.json rename to node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/package.json index bae4a04839d1f..9684991727e7a 100644 --- a/node_modules/@npmcli/agent/node_modules/lru-cache/package.json +++ b/node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache/package.json @@ -1,7 +1,7 @@ { "name": "lru-cache", "description": "A cache object that deletes the least-recently-used items.", - "version": "10.0.1", + "version": "7.18.3", "author": "Isaac Z. 
Schlueter ", "keywords": [ "mru", @@ -11,74 +11,60 @@ "sideEffects": false, "scripts": { "build": "npm run prepare", - "preprepare": "rm -rf dist", - "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", - "postprepare": "bash fixup.sh", "pretest": "npm run prepare", "presnap": "npm run prepare", - "test": "c8 tap", - "snap": "c8 tap", + "prepare": "node ./scripts/transpile-to-esm.js", + "size": "size-limit", + "test": "tap", + "snap": "tap", "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", "format": "prettier --write .", - "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", - "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh", - "prebenchmark": "npm run prepare", - "benchmark": "make -C benchmark", - "preprofile": "npm run prepare", - "profile": "make -C benchmark profile" + "typedoc": "typedoc ./index.d.ts" }, - "main": "./dist/cjs/index.js", - "module": "./dist/mjs/index.js", + "type": "commonjs", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", "exports": { - "./min": { - "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.min.js" - }, - "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.min.js" - } - }, ".": { "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.js" + "types": "./index.d.ts", + "default": "./index.mjs" }, "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.js" + "types": "./index.d.ts", + "default": "./index.js" } - } + }, + "./package.json": "./package.json" }, "repository": "git://github.com/isaacs/node-lru-cache.git", "devDependencies": { "@size-limit/preset-small-lib": "^7.0.8", - "@types/node": "^20.2.5", + "@types/node": "^17.0.31", "@types/tap": "^15.0.6", "benchmark": "^2.1.4", "c8": "^7.11.2", "clock-mock": "^1.0.6", - "esbuild": "^0.17.11", "eslint-config-prettier": "^8.5.0", - "marked": "^4.2.12", - "mkdirp": "^2.1.5", "prettier": "^2.6.2", "size-limit": "^7.0.8", "tap": "^16.3.4", - "ts-node": "^10.9.1", + "ts-node": "^10.7.0", "tslib": "^2.4.0", - "typedoc": "^0.24.6", - "typescript": "^5.0.4" + "typedoc": "^0.23.24", + "typescript": "^4.6.4" }, "license": "ISC", "files": [ - "dist" + "index.js", + "index.mjs", + "index.d.ts" ], "engines": { - "node": "14 || >=16.14" + "node": ">=12" }, "prettier": { "semi": false, @@ -92,17 +78,19 @@ "endOfLine": "lf" }, "tap": { - "coverage": false, + "nyc-arg": [ + "--include=index.js" + ], "node-arg": [ "--expose-gc", - "-r", + "--require", "ts-node/register" ], "ts": false }, "size-limit": [ { - "path": "./dist/mjs/index.js" + "path": "./index.js" } ] } diff --git a/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.js b/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.js deleted file mode 100644 index 02d76ec800a92..0000000000000 --- a/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.js +++ /dev/null @@ -1,1404 +0,0 @@ -"use strict"; -/** - * @module LRUCache - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.LRUCache = void 0; -const perf = typeof performance === 'object' && - performance && - typeof performance.now === 'function' - ? performance - : Date; -const warned = new Set(); -/* c8 ignore start */ -const PROCESS = (typeof process === 'object' && !!process ? process : {}); -/* c8 ignore start */ -const emitWarning = (msg, type, code, fn) => { - typeof PROCESS.emitWarning === 'function' - ? 
PROCESS.emitWarning(msg, type, code, fn) - : console.error(`[${code}] ${type}: ${msg}`); -}; -let AC = globalThis.AbortController; -let AS = globalThis.AbortSignal; -/* c8 ignore start */ -if (typeof AC === 'undefined') { - //@ts-ignore - AS = class AbortSignal { - onabort; - _onabort = []; - reason; - aborted = false; - addEventListener(_, fn) { - this._onabort.push(fn); - } - }; - //@ts-ignore - AC = class AbortController { - constructor() { - warnACPolyfill(); - } - signal = new AS(); - abort(reason) { - if (this.signal.aborted) - return; - //@ts-ignore - this.signal.reason = reason; - //@ts-ignore - this.signal.aborted = true; - //@ts-ignore - for (const fn of this.signal._onabort) { - fn(reason); - } - this.signal.onabort?.(reason); - } - }; - let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; - const warnACPolyfill = () => { - if (!printACPolyfillWarning) - return; - printACPolyfillWarning = false; - emitWarning('AbortController is not defined. If using lru-cache in ' + - 'node 14, load an AbortController polyfill from the ' + - '`node-abort-controller` package. A minimal polyfill is ' + - 'provided for use by LRUCache.fetch(), but it should not be ' + - 'relied upon in other contexts (eg, passing it to other APIs that ' + - 'use AbortController/AbortSignal might have undesirable effects). ' + - 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); - }; -} -/* c8 ignore stop */ -const shouldWarn = (code) => !warned.has(code); -const TYPE = Symbol('type'); -const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); -/* c8 ignore start */ -// This is a little bit ridiculous, tbh. -// The maximum array length is 2^32-1 or thereabouts on most JS impls. -// And well before that point, you're caching the entire world, I mean, -// that's ~32GB of just integers for the next/prev links, plus whatever -// else to hold that many keys and values. Just filling the memory with -// zeroes at init time is brutal when you get that big. -// But why not be complete? -// Maybe in the future, these limits will have expanded. -const getUintArray = (max) => !isPosInt(max) - ? null - : max <= Math.pow(2, 8) - ? Uint8Array - : max <= Math.pow(2, 16) - ? Uint16Array - : max <= Math.pow(2, 32) - ? Uint32Array - : max <= Number.MAX_SAFE_INTEGER - ? ZeroArray - : null; -/* c8 ignore stop */ -class ZeroArray extends Array { - constructor(size) { - super(size); - this.fill(0); - } -} -class Stack { - heap; - length; - // private constructor - static #constructing = false; - static create(max) { - const HeapCls = getUintArray(max); - if (!HeapCls) - return []; - Stack.#constructing = true; - const s = new Stack(max, HeapCls); - Stack.#constructing = false; - return s; - } - constructor(max, HeapCls) { - /* c8 ignore start */ - if (!Stack.#constructing) { - throw new TypeError('instantiate Stack using Stack.create(n)'); - } - /* c8 ignore stop */ - this.heap = new HeapCls(max); - this.length = 0; - } - push(n) { - this.heap[this.length++] = n; - } - pop() { - return this.heap[--this.length]; - } -} -/** - * Default export, the thing you're using this module to get. - * - * All properties from the options object (with the exception of - * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as - * normal public members. (`max` and `maxBase` are read-only getters.) - * Changing any of these will alter the defaults for subsequent method calls, - * but is otherwise safe. 
- */ -class LRUCache { - // properties coming in from the options of these, only max and maxSize - // really *need* to be protected. The rest can be modified, as they just - // set defaults for various methods. - #max; - #maxSize; - #dispose; - #disposeAfter; - #fetchMethod; - /** - * {@link LRUCache.OptionsBase.ttl} - */ - ttl; - /** - * {@link LRUCache.OptionsBase.ttlResolution} - */ - ttlResolution; - /** - * {@link LRUCache.OptionsBase.ttlAutopurge} - */ - ttlAutopurge; - /** - * {@link LRUCache.OptionsBase.updateAgeOnGet} - */ - updateAgeOnGet; - /** - * {@link LRUCache.OptionsBase.updateAgeOnHas} - */ - updateAgeOnHas; - /** - * {@link LRUCache.OptionsBase.allowStale} - */ - allowStale; - /** - * {@link LRUCache.OptionsBase.noDisposeOnSet} - */ - noDisposeOnSet; - /** - * {@link LRUCache.OptionsBase.noUpdateTTL} - */ - noUpdateTTL; - /** - * {@link LRUCache.OptionsBase.maxEntrySize} - */ - maxEntrySize; - /** - * {@link LRUCache.OptionsBase.sizeCalculation} - */ - sizeCalculation; - /** - * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} - */ - noDeleteOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} - */ - noDeleteOnStaleGet; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} - */ - allowStaleOnFetchAbort; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} - */ - allowStaleOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.ignoreFetchAbort} - */ - ignoreFetchAbort; - // computed properties - #size; - #calculatedSize; - #keyMap; - #keyList; - #valList; - #next; - #prev; - #head; - #tail; - #free; - #disposed; - #sizes; - #starts; - #ttls; - #hasDispose; - #hasFetchMethod; - #hasDisposeAfter; - /** - * Do not call this method unless you need to inspect the - * inner workings of the cache. If anything returned by this - * object is modified in any way, strange breakage may occur. - * - * These fields are private for a reason! 
- * - * @internal - */ - static unsafeExposeInternals(c) { - return { - // properties - starts: c.#starts, - ttls: c.#ttls, - sizes: c.#sizes, - keyMap: c.#keyMap, - keyList: c.#keyList, - valList: c.#valList, - next: c.#next, - prev: c.#prev, - get head() { - return c.#head; - }, - get tail() { - return c.#tail; - }, - free: c.#free, - // methods - isBackgroundFetch: (p) => c.#isBackgroundFetch(p), - backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), - moveToTail: (index) => c.#moveToTail(index), - indexes: (options) => c.#indexes(options), - rindexes: (options) => c.#rindexes(options), - isStale: (index) => c.#isStale(index), - }; - } - // Protected read-only members - /** - * {@link LRUCache.OptionsBase.max} (read-only) - */ - get max() { - return this.#max; - } - /** - * {@link LRUCache.OptionsBase.maxSize} (read-only) - */ - get maxSize() { - return this.#maxSize; - } - /** - * The total computed size of items in the cache (read-only) - */ - get calculatedSize() { - return this.#calculatedSize; - } - /** - * The number of items stored in the cache (read-only) - */ - get size() { - return this.#size; - } - /** - * {@link LRUCache.OptionsBase.fetchMethod} (read-only) - */ - get fetchMethod() { - return this.#fetchMethod; - } - /** - * {@link LRUCache.OptionsBase.dispose} (read-only) - */ - get dispose() { - return this.#dispose; - } - /** - * {@link LRUCache.OptionsBase.disposeAfter} (read-only) - */ - get disposeAfter() { - return this.#disposeAfter; - } - constructor(options) { - const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; - if (max !== 0 && !isPosInt(max)) { - throw new TypeError('max option must be a nonnegative integer'); - } - const UintArray = max ? 
getUintArray(max) : Array; - if (!UintArray) { - throw new Error('invalid max value: ' + max); - } - this.#max = max; - this.#maxSize = maxSize; - this.maxEntrySize = maxEntrySize || this.#maxSize; - this.sizeCalculation = sizeCalculation; - if (this.sizeCalculation) { - if (!this.#maxSize && !this.maxEntrySize) { - throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation set to non-function'); - } - } - if (fetchMethod !== undefined && - typeof fetchMethod !== 'function') { - throw new TypeError('fetchMethod must be a function if specified'); - } - this.#fetchMethod = fetchMethod; - this.#hasFetchMethod = !!fetchMethod; - this.#keyMap = new Map(); - this.#keyList = new Array(max).fill(undefined); - this.#valList = new Array(max).fill(undefined); - this.#next = new UintArray(max); - this.#prev = new UintArray(max); - this.#head = 0; - this.#tail = 0; - this.#free = Stack.create(max); - this.#size = 0; - this.#calculatedSize = 0; - if (typeof dispose === 'function') { - this.#dispose = dispose; - } - if (typeof disposeAfter === 'function') { - this.#disposeAfter = disposeAfter; - this.#disposed = []; - } - else { - this.#disposeAfter = undefined; - this.#disposed = undefined; - } - this.#hasDispose = !!this.#dispose; - this.#hasDisposeAfter = !!this.#disposeAfter; - this.noDisposeOnSet = !!noDisposeOnSet; - this.noUpdateTTL = !!noUpdateTTL; - this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; - this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; - this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; - this.ignoreFetchAbort = !!ignoreFetchAbort; - // NB: maxEntrySize is set to maxSize if it's set - if (this.maxEntrySize !== 0) { - if (this.#maxSize !== 0) { - if (!isPosInt(this.#maxSize)) { - throw new TypeError('maxSize must be a positive integer if specified'); - } - } - if (!isPosInt(this.maxEntrySize)) { - throw new TypeError('maxEntrySize must be a positive integer if specified'); - } - this.#initializeSizeTracking(); - } - this.allowStale = !!allowStale; - this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; - this.updateAgeOnGet = !!updateAgeOnGet; - this.updateAgeOnHas = !!updateAgeOnHas; - this.ttlResolution = - isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution - : 1; - this.ttlAutopurge = !!ttlAutopurge; - this.ttl = ttl || 0; - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError('ttl must be a positive integer if specified'); - } - this.#initializeTTLTracking(); - } - // do not allow completely unbounded caches - if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { - throw new TypeError('At least one of max, maxSize, or ttl is required'); - } - if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { - const code = 'LRU_CACHE_UNBOUNDED'; - if (shouldWarn(code)) { - warned.add(code); - const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + - 'result in unbounded memory consumption.'; - emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); - } - } - } - /** - * Return the remaining TTL time for a given entry key - */ - getRemainingTTL(key) { - return this.#keyMap.has(key) ? Infinity : 0; - } - #initializeTTLTracking() { - const ttls = new ZeroArray(this.#max); - const starts = new ZeroArray(this.#max); - this.#ttls = ttls; - this.#starts = starts; - this.#setItemTTL = (index, ttl, start = perf.now()) => { - starts[index] = ttl !== 0 ? 
start : 0; - ttls[index] = ttl; - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.#isStale(index)) { - this.delete(this.#keyList[index]); - } - }, ttl + 1); - // unref() not supported on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - }; - this.#updateItemAge = index => { - starts[index] = ttls[index] !== 0 ? perf.now() : 0; - }; - this.#statusTTL = (status, index) => { - if (ttls[index]) { - const ttl = ttls[index]; - const start = starts[index]; - status.ttl = ttl; - status.start = start; - status.now = cachedNow || getNow(); - const age = status.now - start; - status.remainingTTL = ttl - age; - } - }; - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0; - const getNow = () => { - const n = perf.now(); - if (this.ttlResolution > 0) { - cachedNow = n; - const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); - // not available on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - return n; - }; - this.getRemainingTTL = key => { - const index = this.#keyMap.get(key); - if (index === undefined) { - return 0; - } - const ttl = ttls[index]; - const start = starts[index]; - if (ttl === 0 || start === 0) { - return Infinity; - } - const age = (cachedNow || getNow()) - start; - return ttl - age; - }; - this.#isStale = index => { - return (ttls[index] !== 0 && - starts[index] !== 0 && - (cachedNow || getNow()) - starts[index] > ttls[index]); - }; - } - // conditionally set private methods related to TTL - #updateItemAge = () => { }; - #statusTTL = () => { }; - #setItemTTL = () => { }; - /* c8 ignore stop */ - #isStale = () => false; - #initializeSizeTracking() { - const sizes = new ZeroArray(this.#max); - this.#calculatedSize = 0; - this.#sizes = sizes; - this.#removeItemSize = index => { - this.#calculatedSize -= sizes[index]; - sizes[index] = 0; - }; - this.#requireSize = (k, v, size, sizeCalculation) => { - // provisionally accept background fetches. - // actual value size will be checked when they return. - if (this.#isBackgroundFetch(v)) { - return 0; - } - if (!isPosInt(size)) { - if (sizeCalculation) { - if (typeof sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation must be a function'); - } - size = sizeCalculation(v, k); - if (!isPosInt(size)) { - throw new TypeError('sizeCalculation return invalid (expect positive integer)'); - } - } - else { - throw new TypeError('invalid size value (must be positive integer). 
' + - 'When maxSize or maxEntrySize is used, sizeCalculation ' + - 'or size must be set.'); - } - } - return size; - }; - this.#addItemSize = (index, size, status) => { - sizes[index] = size; - if (this.#maxSize) { - const maxSize = this.#maxSize - sizes[index]; - while (this.#calculatedSize > maxSize) { - this.#evict(true); - } - } - this.#calculatedSize += sizes[index]; - if (status) { - status.entrySize = size; - status.totalCalculatedSize = this.#calculatedSize; - } - }; - } - #removeItemSize = _i => { }; - #addItemSize = (_i, _s, _st) => { }; - #requireSize = (_k, _v, size, sizeCalculation) => { - if (size || sizeCalculation) { - throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); - } - return 0; - }; - *#indexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#tail; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#head) { - break; - } - else { - i = this.#prev[i]; - } - } - } - } - *#rindexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#head; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#tail) { - break; - } - else { - i = this.#next[i]; - } - } - } - } - #isValidIndex(index) { - return (index !== undefined && - this.#keyMap.get(this.#keyList[index]) === index); - } - /** - * Return a generator yielding `[key, value]` pairs, - * in order from most recently used to least recently used. - */ - *entries() { - for (const i of this.#indexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Inverse order version of {@link LRUCache.entries} - * - * Return a generator yielding `[key, value]` pairs, - * in order from least recently used to most recently used. - */ - *rentries() { - for (const i of this.#rindexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Return a generator yielding the keys in the cache, - * in order from most recently used to least recently used. - */ - *keys() { - for (const i of this.#indexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Inverse order version of {@link LRUCache.keys} - * - * Return a generator yielding the keys in the cache, - * in order from least recently used to most recently used. - */ - *rkeys() { - for (const i of this.#rindexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Return a generator yielding the values in the cache, - * in order from most recently used to least recently used. - */ - *values() { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Inverse order version of {@link LRUCache.values} - * - * Return a generator yielding the values in the cache, - * in order from least recently used to most recently used. 
- */ - *rvalues() { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Iterating over the cache itself yields the same results as - * {@link LRUCache.entries} - */ - [Symbol.iterator]() { - return this.entries(); - } - /** - * Find a value for which the supplied fn method returns a truthy value, - * similar to Array.find(). fn is called as fn(value, key, cache). - */ - find(fn, getOptions = {}) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - if (fn(value, this.#keyList[i], this)) { - return this.get(this.#keyList[i], getOptions); - } - } - } - /** - * Call the supplied function on each item in the cache, in order from - * most recently used to least recently used. fn is called as - * fn(value, key, cache). Does not update age or recenty of use. - * Does not iterate over stale values. - */ - forEach(fn, thisp = this) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * The same as {@link LRUCache.forEach} but items are iterated over in - * reverse order. (ie, less recently used items are iterated over first.) - */ - rforEach(fn, thisp = this) { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * Delete any stale entries. Returns true if anything was removed, - * false otherwise. - */ - purgeStale() { - let deleted = false; - for (const i of this.#rindexes({ allowStale: true })) { - if (this.#isStale(i)) { - this.delete(this.#keyList[i]); - deleted = true; - } - } - return deleted; - } - /** - * Return an array of [key, {@link LRUCache.Entry}] tuples which can be - * passed to cache.load() - */ - dump() { - const arr = []; - for (const i of this.#indexes({ allowStale: true })) { - const key = this.#keyList[i]; - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined || key === undefined) - continue; - const entry = { value }; - if (this.#ttls && this.#starts) { - entry.ttl = this.#ttls[i]; - // always dump the start relative to a portable timestamp - // it's ok for this to be a bit slow, it's a rare operation. - const age = perf.now() - this.#starts[i]; - entry.start = Math.floor(Date.now() - age); - } - if (this.#sizes) { - entry.size = this.#sizes[i]; - } - arr.unshift([key, entry]); - } - return arr; - } - /** - * Reset the cache and load in the items in entries in the order listed. - * Note that the shape of the resulting cache may be different if the - * same options are not used in both caches. - */ - load(arr) { - this.clear(); - for (const [key, entry] of arr) { - if (entry.start) { - // entry.start is a portable timestamp, but we may be using - // node's performance.now(), so calculate the offset, so that - // we get the intended remaining TTL, no matter how long it's - // been on ice. - // - // it's ok for this to be a bit slow, it's a rare operation. 
- const age = Date.now() - entry.start; - entry.start = perf.now() - age; - } - this.set(key, entry.value, entry); - } - } - /** - * Add a value to the cache. - * - * Note: if `undefined` is specified as a value, this is an alias for - * {@link LRUCache#delete} - */ - set(k, v, setOptions = {}) { - if (v === undefined) { - this.delete(k); - return this; - } - const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; - let { noUpdateTTL = this.noUpdateTTL } = setOptions; - const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); - // if the item doesn't fit, don't do anything - // NB: maxEntrySize set to maxSize by default - if (this.maxEntrySize && size > this.maxEntrySize) { - if (status) { - status.set = 'miss'; - status.maxEntrySizeExceeded = true; - } - // have to delete, in case something is there already. - this.delete(k); - return this; - } - let index = this.#size === 0 ? undefined : this.#keyMap.get(k); - if (index === undefined) { - // addition - index = (this.#size === 0 - ? this.#tail - : this.#free.length !== 0 - ? this.#free.pop() - : this.#size === this.#max - ? this.#evict(false) - : this.#size); - this.#keyList[index] = k; - this.#valList[index] = v; - this.#keyMap.set(k, index); - this.#next[this.#tail] = index; - this.#prev[index] = this.#tail; - this.#tail = index; - this.#size++; - this.#addItemSize(index, size, status); - if (status) - status.set = 'add'; - noUpdateTTL = false; - } - else { - // update - this.#moveToTail(index); - const oldVal = this.#valList[index]; - if (v !== oldVal) { - if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { - oldVal.__abortController.abort(new Error('replaced')); - const { __staleWhileFetching: s } = oldVal; - if (s !== undefined && !noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(s, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([s, k, 'set']); - } - } - } - else if (!noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(oldVal, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([oldVal, k, 'set']); - } - } - this.#removeItemSize(index); - this.#addItemSize(index, size, status); - this.#valList[index] = v; - if (status) { - status.set = 'replace'; - const oldValue = oldVal && this.#isBackgroundFetch(oldVal) - ? oldVal.__staleWhileFetching - : oldVal; - if (oldValue !== undefined) - status.oldValue = oldValue; - } - } - else if (status) { - status.set = 'update'; - } - } - if (ttl !== 0 && !this.#ttls) { - this.#initializeTTLTracking(); - } - if (this.#ttls) { - if (!noUpdateTTL) { - this.#setItemTTL(index, ttl, start); - } - if (status) - this.#statusTTL(status, index); - } - if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return this; - } - /** - * Evict the least recently used item, returning its value or - * `undefined` if cache is empty. 
- */ - pop() { - try { - while (this.#size) { - const val = this.#valList[this.#head]; - this.#evict(true); - if (this.#isBackgroundFetch(val)) { - if (val.__staleWhileFetching) { - return val.__staleWhileFetching; - } - } - else if (val !== undefined) { - return val; - } - } - } - finally { - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } - } - #evict(free) { - const head = this.#head; - const k = this.#keyList[head]; - const v = this.#valList[head]; - if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('evicted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'evict'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'evict']); - } - } - this.#removeItemSize(head); - // if we aren't about to use the index, then null these out - if (free) { - this.#keyList[head] = undefined; - this.#valList[head] = undefined; - this.#free.push(head); - } - if (this.#size === 1) { - this.#head = this.#tail = 0; - this.#free.length = 0; - } - else { - this.#head = this.#next[head]; - } - this.#keyMap.delete(k); - this.#size--; - return head; - } - /** - * Check if a key is in the cache, without updating the recency of use. - * Will return false if the item is stale, even though it is technically - * in the cache. - * - * Will not update item age unless - * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. - */ - has(k, hasOptions = {}) { - const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v) && - v.__staleWhileFetching === undefined) { - return false; - } - if (!this.#isStale(index)) { - if (updateAgeOnHas) { - this.#updateItemAge(index); - } - if (status) { - status.has = 'hit'; - this.#statusTTL(status, index); - } - return true; - } - else if (status) { - status.has = 'stale'; - this.#statusTTL(status, index); - } - } - else if (status) { - status.has = 'miss'; - } - return false; - } - /** - * Like {@link LRUCache#get} but doesn't update recency or delete stale - * items. - * - * Returns `undefined` if the item is stale, unless - * {@link LRUCache.OptionsBase.allowStale} is set. - */ - peek(k, peekOptions = {}) { - const { allowStale = this.allowStale } = peekOptions; - const index = this.#keyMap.get(k); - if (index !== undefined && - (allowStale || !this.#isStale(index))) { - const v = this.#valList[index]; - // either stale and allowed, or forcing a refresh of non-stale value - return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; - } - } - #backgroundFetch(k, index, options, context) { - const v = index === undefined ? undefined : this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - return v; - } - const ac = new AC(); - const { signal } = options; - // when/if our AC signals, then stop listening to theirs. 
- signal?.addEventListener('abort', () => ac.abort(signal.reason), { - signal: ac.signal, - }); - const fetchOpts = { - signal: ac.signal, - options, - context, - }; - const cb = (v, updateCache = false) => { - const { aborted } = ac.signal; - const ignoreAbort = options.ignoreFetchAbort && v !== undefined; - if (options.status) { - if (aborted && !updateCache) { - options.status.fetchAborted = true; - options.status.fetchError = ac.signal.reason; - if (ignoreAbort) - options.status.fetchAbortIgnored = true; - } - else { - options.status.fetchResolved = true; - } - } - if (aborted && !ignoreAbort && !updateCache) { - return fetchFail(ac.signal.reason); - } - // either we didn't abort, and are still here, or we did, and ignored - const bf = p; - if (this.#valList[index] === p) { - if (v === undefined) { - if (bf.__staleWhileFetching) { - this.#valList[index] = bf.__staleWhileFetching; - } - else { - this.delete(k); - } - } - else { - if (options.status) - options.status.fetchUpdated = true; - this.set(k, v, fetchOpts.options); - } - } - return v; - }; - const eb = (er) => { - if (options.status) { - options.status.fetchRejected = true; - options.status.fetchError = er; - } - return fetchFail(er); - }; - const fetchFail = (er) => { - const { aborted } = ac.signal; - const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; - const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; - const noDelete = allowStale || options.noDeleteOnFetchRejection; - const bf = p; - if (this.#valList[index] === p) { - // if we allow stale on fetch rejections, then we need to ensure that - // the stale value is not removed from the cache when the fetch fails. - const del = !noDelete || bf.__staleWhileFetching === undefined; - if (del) { - this.delete(k); - } - else if (!allowStaleAborted) { - // still replace the *promise* with the stale value, - // since we are done with the promise at this point. - // leave it untouched if we're still waiting for an - // aborted background fetch that hasn't yet returned. - this.#valList[index] = bf.__staleWhileFetching; - } - } - if (allowStale) { - if (options.status && bf.__staleWhileFetching !== undefined) { - options.status.returnedStale = true; - } - return bf.__staleWhileFetching; - } - else if (bf.__returned === bf) { - throw er; - } - }; - const pcall = (res, rej) => { - const fmp = this.#fetchMethod?.(k, v, fetchOpts); - if (fmp && fmp instanceof Promise) { - fmp.then(v => res(v === undefined ? undefined : v), rej); - } - // ignored, we go until we finish, regardless. - // defer check until we are actually aborting, - // so fetchMethod can override. - ac.signal.addEventListener('abort', () => { - if (!options.ignoreFetchAbort || - options.allowStaleOnFetchAbort) { - res(undefined); - // when it eventually resolves, update the cache. - if (options.allowStaleOnFetchAbort) { - res = v => cb(v, true); - } - } - }); - }; - if (options.status) - options.status.fetchDispatched = true; - const p = new Promise(pcall).then(cb, eb); - const bf = Object.assign(p, { - __abortController: ac, - __staleWhileFetching: v, - __returned: undefined, - }); - if (index === undefined) { - // internal, don't expose status. 
- this.set(k, bf, { ...fetchOpts.options, status: undefined }); - index = this.#keyMap.get(k); - } - else { - this.#valList[index] = bf; - } - return bf; - } - #isBackgroundFetch(p) { - if (!this.#hasFetchMethod) - return false; - const b = p; - return (!!b && - b instanceof Promise && - b.hasOwnProperty('__staleWhileFetching') && - b.__abortController instanceof AC); - } - async fetch(k, fetchOptions = {}) { - const { - // get options - allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, - // set options - ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, - // fetch exclusive options - noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; - if (!this.#hasFetchMethod) { - if (status) - status.fetch = 'get'; - return this.get(k, { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - status, - }); - } - const options = { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - ttl, - noDisposeOnSet, - size, - sizeCalculation, - noUpdateTTL, - noDeleteOnFetchRejection, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - status, - signal, - }; - let index = this.#keyMap.get(k); - if (index === undefined) { - if (status) - status.fetch = 'miss'; - const p = this.#backgroundFetch(k, index, options, context); - return (p.__returned = p); - } - else { - // in cache, maybe already fetching - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - const stale = allowStale && v.__staleWhileFetching !== undefined; - if (status) { - status.fetch = 'inflight'; - if (stale) - status.returnedStale = true; - } - return stale ? v.__staleWhileFetching : (v.__returned = v); - } - // if we force a refresh, that means do NOT serve the cached value, - // unless we are already in the process of refreshing the cache. - const isStale = this.#isStale(index); - if (!forceRefresh && !isStale) { - if (status) - status.fetch = 'hit'; - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - if (status) - this.#statusTTL(status, index); - return v; - } - // ok, it is stale or a forced refresh, and not already fetching. - // refresh the cache. - const p = this.#backgroundFetch(k, index, options, context); - const hasStale = p.__staleWhileFetching !== undefined; - const staleVal = hasStale && allowStale; - if (status) { - status.fetch = isStale ? 'stale' : 'refresh'; - if (staleVal && isStale) - status.returnedStale = true; - } - return staleVal ? p.__staleWhileFetching : (p.__returned = p); - } - } - /** - * Return a value from the cache. Will update the recency of the cache - * entry found. - * - * If the key is not found, get() will return `undefined`. 
- */ - get(k, getOptions = {}) { - const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const value = this.#valList[index]; - const fetching = this.#isBackgroundFetch(value); - if (status) - this.#statusTTL(status, index); - if (this.#isStale(index)) { - if (status) - status.get = 'stale'; - // delete only if not an in-flight background fetch - if (!fetching) { - if (!noDeleteOnStaleGet) { - this.delete(k); - } - if (status && allowStale) - status.returnedStale = true; - return allowStale ? value : undefined; - } - else { - if (status && - allowStale && - value.__staleWhileFetching !== undefined) { - status.returnedStale = true; - } - return allowStale ? value.__staleWhileFetching : undefined; - } - } - else { - if (status) - status.get = 'hit'; - // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching. - // If it's not stale, and fetching, AND has a __staleWhileFetching - // value, then that means the user fetched with {forceRefresh:true}, - // so it's safe to return that value. - if (fetching) { - return value.__staleWhileFetching; - } - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - return value; - } - } - else if (status) { - status.get = 'miss'; - } - } - #connect(p, n) { - this.#prev[n] = p; - this.#next[p] = n; - } - #moveToTail(index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.#tail) { - if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#connect(this.#prev[index], this.#next[index]); - } - this.#connect(this.#tail, index); - this.#tail = index; - } - } - /** - * Deletes a key out of the cache. - * Returns true if the key was deleted, false otherwise. - */ - delete(k) { - let deleted = false; - if (this.#size !== 0) { - const index = this.#keyMap.get(k); - if (index !== undefined) { - deleted = true; - if (this.#size === 1) { - this.clear(); - } - else { - this.#removeItemSize(index); - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'delete'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'delete']); - } - } - this.#keyMap.delete(k); - this.#keyList[index] = undefined; - this.#valList[index] = undefined; - if (index === this.#tail) { - this.#tail = this.#prev[index]; - } - else if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#next[this.#prev[index]] = this.#next[index]; - this.#prev[this.#next[index]] = this.#prev[index]; - } - this.#size--; - this.#free.push(index); - } - } - } - if (this.#hasDisposeAfter && this.#disposed?.length) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return deleted; - } - /** - * Clear the cache entirely, throwing away all values. 
- */ - clear() { - for (const index of this.#rindexes({ allowStale: true })) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else { - const k = this.#keyList[index]; - if (this.#hasDispose) { - this.#dispose?.(v, k, 'delete'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'delete']); - } - } - } - this.#keyMap.clear(); - this.#valList.fill(undefined); - this.#keyList.fill(undefined); - if (this.#ttls && this.#starts) { - this.#ttls.fill(0); - this.#starts.fill(0); - } - if (this.#sizes) { - this.#sizes.fill(0); - } - this.#head = 0; - this.#tail = 0; - this.#free.length = 0; - this.#calculatedSize = 0; - this.#size = 0; - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } -} -exports.LRUCache = LRUCache; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.min.js b/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.min.js deleted file mode 100644 index 8d34a03041d25..0000000000000 --- a/node_modules/cacache/node_modules/lru-cache/dist/cjs/index.min.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=R.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#W(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};exports.LRUCache=C; -//# sourceMappingURL=index.min.js.map diff --git a/node_modules/cacache/node_modules/lru-cache/dist/cjs/package.json b/node_modules/cacache/node_modules/lru-cache/dist/cjs/package.json deleted file mode 100644 index 5bbefffbabee3..0000000000000 --- a/node_modules/cacache/node_modules/lru-cache/dist/cjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "commonjs" -} diff --git a/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.js b/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.js deleted file mode 100644 index 23b9754ad6c76..0000000000000 --- a/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.js +++ /dev/null @@ -1,1400 +0,0 @@ -/** - * @module LRUCache - */ -const perf = typeof performance === 'object' && - performance && - typeof performance.now === 'function' - ? performance - : Date; -const warned = new Set(); -/* c8 ignore start */ -const PROCESS = (typeof process === 'object' && !!process ? process : {}); -/* c8 ignore start */ -const emitWarning = (msg, type, code, fn) => { - typeof PROCESS.emitWarning === 'function' - ? PROCESS.emitWarning(msg, type, code, fn) - : console.error(`[${code}] ${type}: ${msg}`); -}; -let AC = globalThis.AbortController; -let AS = globalThis.AbortSignal; -/* c8 ignore start */ -if (typeof AC === 'undefined') { - //@ts-ignore - AS = class AbortSignal { - onabort; - _onabort = []; - reason; - aborted = false; - addEventListener(_, fn) { - this._onabort.push(fn); - } - }; - //@ts-ignore - AC = class AbortController { - constructor() { - warnACPolyfill(); - } - signal = new AS(); - abort(reason) { - if (this.signal.aborted) - return; - //@ts-ignore - this.signal.reason = reason; - //@ts-ignore - this.signal.aborted = true; - //@ts-ignore - for (const fn of this.signal._onabort) { - fn(reason); - } - this.signal.onabort?.(reason); - } - }; - let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; - const warnACPolyfill = () => { - if (!printACPolyfillWarning) - return; - printACPolyfillWarning = false; - emitWarning('AbortController is not defined. If using lru-cache in ' + - 'node 14, load an AbortController polyfill from the ' + - '`node-abort-controller` package. A minimal polyfill is ' + - 'provided for use by LRUCache.fetch(), but it should not be ' + - 'relied upon in other contexts (eg, passing it to other APIs that ' + - 'use AbortController/AbortSignal might have undesirable effects). 
' + - 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); - }; -} -/* c8 ignore stop */ -const shouldWarn = (code) => !warned.has(code); -const TYPE = Symbol('type'); -const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); -/* c8 ignore start */ -// This is a little bit ridiculous, tbh. -// The maximum array length is 2^32-1 or thereabouts on most JS impls. -// And well before that point, you're caching the entire world, I mean, -// that's ~32GB of just integers for the next/prev links, plus whatever -// else to hold that many keys and values. Just filling the memory with -// zeroes at init time is brutal when you get that big. -// But why not be complete? -// Maybe in the future, these limits will have expanded. -const getUintArray = (max) => !isPosInt(max) - ? null - : max <= Math.pow(2, 8) - ? Uint8Array - : max <= Math.pow(2, 16) - ? Uint16Array - : max <= Math.pow(2, 32) - ? Uint32Array - : max <= Number.MAX_SAFE_INTEGER - ? ZeroArray - : null; -/* c8 ignore stop */ -class ZeroArray extends Array { - constructor(size) { - super(size); - this.fill(0); - } -} -class Stack { - heap; - length; - // private constructor - static #constructing = false; - static create(max) { - const HeapCls = getUintArray(max); - if (!HeapCls) - return []; - Stack.#constructing = true; - const s = new Stack(max, HeapCls); - Stack.#constructing = false; - return s; - } - constructor(max, HeapCls) { - /* c8 ignore start */ - if (!Stack.#constructing) { - throw new TypeError('instantiate Stack using Stack.create(n)'); - } - /* c8 ignore stop */ - this.heap = new HeapCls(max); - this.length = 0; - } - push(n) { - this.heap[this.length++] = n; - } - pop() { - return this.heap[--this.length]; - } -} -/** - * Default export, the thing you're using this module to get. - * - * All properties from the options object (with the exception of - * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as - * normal public members. (`max` and `maxBase` are read-only getters.) - * Changing any of these will alter the defaults for subsequent method calls, - * but is otherwise safe. - */ -export class LRUCache { - // properties coming in from the options of these, only max and maxSize - // really *need* to be protected. The rest can be modified, as they just - // set defaults for various methods. 
- #max; - #maxSize; - #dispose; - #disposeAfter; - #fetchMethod; - /** - * {@link LRUCache.OptionsBase.ttl} - */ - ttl; - /** - * {@link LRUCache.OptionsBase.ttlResolution} - */ - ttlResolution; - /** - * {@link LRUCache.OptionsBase.ttlAutopurge} - */ - ttlAutopurge; - /** - * {@link LRUCache.OptionsBase.updateAgeOnGet} - */ - updateAgeOnGet; - /** - * {@link LRUCache.OptionsBase.updateAgeOnHas} - */ - updateAgeOnHas; - /** - * {@link LRUCache.OptionsBase.allowStale} - */ - allowStale; - /** - * {@link LRUCache.OptionsBase.noDisposeOnSet} - */ - noDisposeOnSet; - /** - * {@link LRUCache.OptionsBase.noUpdateTTL} - */ - noUpdateTTL; - /** - * {@link LRUCache.OptionsBase.maxEntrySize} - */ - maxEntrySize; - /** - * {@link LRUCache.OptionsBase.sizeCalculation} - */ - sizeCalculation; - /** - * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} - */ - noDeleteOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} - */ - noDeleteOnStaleGet; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} - */ - allowStaleOnFetchAbort; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} - */ - allowStaleOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.ignoreFetchAbort} - */ - ignoreFetchAbort; - // computed properties - #size; - #calculatedSize; - #keyMap; - #keyList; - #valList; - #next; - #prev; - #head; - #tail; - #free; - #disposed; - #sizes; - #starts; - #ttls; - #hasDispose; - #hasFetchMethod; - #hasDisposeAfter; - /** - * Do not call this method unless you need to inspect the - * inner workings of the cache. If anything returned by this - * object is modified in any way, strange breakage may occur. - * - * These fields are private for a reason! - * - * @internal - */ - static unsafeExposeInternals(c) { - return { - // properties - starts: c.#starts, - ttls: c.#ttls, - sizes: c.#sizes, - keyMap: c.#keyMap, - keyList: c.#keyList, - valList: c.#valList, - next: c.#next, - prev: c.#prev, - get head() { - return c.#head; - }, - get tail() { - return c.#tail; - }, - free: c.#free, - // methods - isBackgroundFetch: (p) => c.#isBackgroundFetch(p), - backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), - moveToTail: (index) => c.#moveToTail(index), - indexes: (options) => c.#indexes(options), - rindexes: (options) => c.#rindexes(options), - isStale: (index) => c.#isStale(index), - }; - } - // Protected read-only members - /** - * {@link LRUCache.OptionsBase.max} (read-only) - */ - get max() { - return this.#max; - } - /** - * {@link LRUCache.OptionsBase.maxSize} (read-only) - */ - get maxSize() { - return this.#maxSize; - } - /** - * The total computed size of items in the cache (read-only) - */ - get calculatedSize() { - return this.#calculatedSize; - } - /** - * The number of items stored in the cache (read-only) - */ - get size() { - return this.#size; - } - /** - * {@link LRUCache.OptionsBase.fetchMethod} (read-only) - */ - get fetchMethod() { - return this.#fetchMethod; - } - /** - * {@link LRUCache.OptionsBase.dispose} (read-only) - */ - get dispose() { - return this.#dispose; - } - /** - * {@link LRUCache.OptionsBase.disposeAfter} (read-only) - */ - get disposeAfter() { - return this.#disposeAfter; - } - constructor(options) { - const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, 
allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; - if (max !== 0 && !isPosInt(max)) { - throw new TypeError('max option must be a nonnegative integer'); - } - const UintArray = max ? getUintArray(max) : Array; - if (!UintArray) { - throw new Error('invalid max value: ' + max); - } - this.#max = max; - this.#maxSize = maxSize; - this.maxEntrySize = maxEntrySize || this.#maxSize; - this.sizeCalculation = sizeCalculation; - if (this.sizeCalculation) { - if (!this.#maxSize && !this.maxEntrySize) { - throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation set to non-function'); - } - } - if (fetchMethod !== undefined && - typeof fetchMethod !== 'function') { - throw new TypeError('fetchMethod must be a function if specified'); - } - this.#fetchMethod = fetchMethod; - this.#hasFetchMethod = !!fetchMethod; - this.#keyMap = new Map(); - this.#keyList = new Array(max).fill(undefined); - this.#valList = new Array(max).fill(undefined); - this.#next = new UintArray(max); - this.#prev = new UintArray(max); - this.#head = 0; - this.#tail = 0; - this.#free = Stack.create(max); - this.#size = 0; - this.#calculatedSize = 0; - if (typeof dispose === 'function') { - this.#dispose = dispose; - } - if (typeof disposeAfter === 'function') { - this.#disposeAfter = disposeAfter; - this.#disposed = []; - } - else { - this.#disposeAfter = undefined; - this.#disposed = undefined; - } - this.#hasDispose = !!this.#dispose; - this.#hasDisposeAfter = !!this.#disposeAfter; - this.noDisposeOnSet = !!noDisposeOnSet; - this.noUpdateTTL = !!noUpdateTTL; - this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; - this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; - this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; - this.ignoreFetchAbort = !!ignoreFetchAbort; - // NB: maxEntrySize is set to maxSize if it's set - if (this.maxEntrySize !== 0) { - if (this.#maxSize !== 0) { - if (!isPosInt(this.#maxSize)) { - throw new TypeError('maxSize must be a positive integer if specified'); - } - } - if (!isPosInt(this.maxEntrySize)) { - throw new TypeError('maxEntrySize must be a positive integer if specified'); - } - this.#initializeSizeTracking(); - } - this.allowStale = !!allowStale; - this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; - this.updateAgeOnGet = !!updateAgeOnGet; - this.updateAgeOnHas = !!updateAgeOnHas; - this.ttlResolution = - isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution - : 1; - this.ttlAutopurge = !!ttlAutopurge; - this.ttl = ttl || 0; - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError('ttl must be a positive integer if specified'); - } - this.#initializeTTLTracking(); - } - // do not allow completely unbounded caches - if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { - throw new TypeError('At least one of max, maxSize, or ttl is required'); - } - if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { - const code = 'LRU_CACHE_UNBOUNDED'; - if (shouldWarn(code)) { - warned.add(code); - const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + - 'result in unbounded memory consumption.'; - emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); - } - } - } - /** - * Return the remaining TTL time for a given entry key - */ - getRemainingTTL(key) { - return this.#keyMap.has(key) ? 
Infinity : 0; - } - #initializeTTLTracking() { - const ttls = new ZeroArray(this.#max); - const starts = new ZeroArray(this.#max); - this.#ttls = ttls; - this.#starts = starts; - this.#setItemTTL = (index, ttl, start = perf.now()) => { - starts[index] = ttl !== 0 ? start : 0; - ttls[index] = ttl; - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.#isStale(index)) { - this.delete(this.#keyList[index]); - } - }, ttl + 1); - // unref() not supported on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - }; - this.#updateItemAge = index => { - starts[index] = ttls[index] !== 0 ? perf.now() : 0; - }; - this.#statusTTL = (status, index) => { - if (ttls[index]) { - const ttl = ttls[index]; - const start = starts[index]; - status.ttl = ttl; - status.start = start; - status.now = cachedNow || getNow(); - const age = status.now - start; - status.remainingTTL = ttl - age; - } - }; - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0; - const getNow = () => { - const n = perf.now(); - if (this.ttlResolution > 0) { - cachedNow = n; - const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); - // not available on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - return n; - }; - this.getRemainingTTL = key => { - const index = this.#keyMap.get(key); - if (index === undefined) { - return 0; - } - const ttl = ttls[index]; - const start = starts[index]; - if (ttl === 0 || start === 0) { - return Infinity; - } - const age = (cachedNow || getNow()) - start; - return ttl - age; - }; - this.#isStale = index => { - return (ttls[index] !== 0 && - starts[index] !== 0 && - (cachedNow || getNow()) - starts[index] > ttls[index]); - }; - } - // conditionally set private methods related to TTL - #updateItemAge = () => { }; - #statusTTL = () => { }; - #setItemTTL = () => { }; - /* c8 ignore stop */ - #isStale = () => false; - #initializeSizeTracking() { - const sizes = new ZeroArray(this.#max); - this.#calculatedSize = 0; - this.#sizes = sizes; - this.#removeItemSize = index => { - this.#calculatedSize -= sizes[index]; - sizes[index] = 0; - }; - this.#requireSize = (k, v, size, sizeCalculation) => { - // provisionally accept background fetches. - // actual value size will be checked when they return. - if (this.#isBackgroundFetch(v)) { - return 0; - } - if (!isPosInt(size)) { - if (sizeCalculation) { - if (typeof sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation must be a function'); - } - size = sizeCalculation(v, k); - if (!isPosInt(size)) { - throw new TypeError('sizeCalculation return invalid (expect positive integer)'); - } - } - else { - throw new TypeError('invalid size value (must be positive integer). 
' + - 'When maxSize or maxEntrySize is used, sizeCalculation ' + - 'or size must be set.'); - } - } - return size; - }; - this.#addItemSize = (index, size, status) => { - sizes[index] = size; - if (this.#maxSize) { - const maxSize = this.#maxSize - sizes[index]; - while (this.#calculatedSize > maxSize) { - this.#evict(true); - } - } - this.#calculatedSize += sizes[index]; - if (status) { - status.entrySize = size; - status.totalCalculatedSize = this.#calculatedSize; - } - }; - } - #removeItemSize = _i => { }; - #addItemSize = (_i, _s, _st) => { }; - #requireSize = (_k, _v, size, sizeCalculation) => { - if (size || sizeCalculation) { - throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); - } - return 0; - }; - *#indexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#tail; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#head) { - break; - } - else { - i = this.#prev[i]; - } - } - } - } - *#rindexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#head; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#tail) { - break; - } - else { - i = this.#next[i]; - } - } - } - } - #isValidIndex(index) { - return (index !== undefined && - this.#keyMap.get(this.#keyList[index]) === index); - } - /** - * Return a generator yielding `[key, value]` pairs, - * in order from most recently used to least recently used. - */ - *entries() { - for (const i of this.#indexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Inverse order version of {@link LRUCache.entries} - * - * Return a generator yielding `[key, value]` pairs, - * in order from least recently used to most recently used. - */ - *rentries() { - for (const i of this.#rindexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Return a generator yielding the keys in the cache, - * in order from most recently used to least recently used. - */ - *keys() { - for (const i of this.#indexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Inverse order version of {@link LRUCache.keys} - * - * Return a generator yielding the keys in the cache, - * in order from least recently used to most recently used. - */ - *rkeys() { - for (const i of this.#rindexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Return a generator yielding the values in the cache, - * in order from most recently used to least recently used. - */ - *values() { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Inverse order version of {@link LRUCache.values} - * - * Return a generator yielding the values in the cache, - * in order from least recently used to most recently used. 
- */ - *rvalues() { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Iterating over the cache itself yields the same results as - * {@link LRUCache.entries} - */ - [Symbol.iterator]() { - return this.entries(); - } - /** - * Find a value for which the supplied fn method returns a truthy value, - * similar to Array.find(). fn is called as fn(value, key, cache). - */ - find(fn, getOptions = {}) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - if (fn(value, this.#keyList[i], this)) { - return this.get(this.#keyList[i], getOptions); - } - } - } - /** - * Call the supplied function on each item in the cache, in order from - * most recently used to least recently used. fn is called as - * fn(value, key, cache). Does not update age or recenty of use. - * Does not iterate over stale values. - */ - forEach(fn, thisp = this) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * The same as {@link LRUCache.forEach} but items are iterated over in - * reverse order. (ie, less recently used items are iterated over first.) - */ - rforEach(fn, thisp = this) { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * Delete any stale entries. Returns true if anything was removed, - * false otherwise. - */ - purgeStale() { - let deleted = false; - for (const i of this.#rindexes({ allowStale: true })) { - if (this.#isStale(i)) { - this.delete(this.#keyList[i]); - deleted = true; - } - } - return deleted; - } - /** - * Return an array of [key, {@link LRUCache.Entry}] tuples which can be - * passed to cache.load() - */ - dump() { - const arr = []; - for (const i of this.#indexes({ allowStale: true })) { - const key = this.#keyList[i]; - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined || key === undefined) - continue; - const entry = { value }; - if (this.#ttls && this.#starts) { - entry.ttl = this.#ttls[i]; - // always dump the start relative to a portable timestamp - // it's ok for this to be a bit slow, it's a rare operation. - const age = perf.now() - this.#starts[i]; - entry.start = Math.floor(Date.now() - age); - } - if (this.#sizes) { - entry.size = this.#sizes[i]; - } - arr.unshift([key, entry]); - } - return arr; - } - /** - * Reset the cache and load in the items in entries in the order listed. - * Note that the shape of the resulting cache may be different if the - * same options are not used in both caches. - */ - load(arr) { - this.clear(); - for (const [key, entry] of arr) { - if (entry.start) { - // entry.start is a portable timestamp, but we may be using - // node's performance.now(), so calculate the offset, so that - // we get the intended remaining TTL, no matter how long it's - // been on ice. - // - // it's ok for this to be a bit slow, it's a rare operation. 
- const age = Date.now() - entry.start; - entry.start = perf.now() - age; - } - this.set(key, entry.value, entry); - } - } - /** - * Add a value to the cache. - * - * Note: if `undefined` is specified as a value, this is an alias for - * {@link LRUCache#delete} - */ - set(k, v, setOptions = {}) { - if (v === undefined) { - this.delete(k); - return this; - } - const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; - let { noUpdateTTL = this.noUpdateTTL } = setOptions; - const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); - // if the item doesn't fit, don't do anything - // NB: maxEntrySize set to maxSize by default - if (this.maxEntrySize && size > this.maxEntrySize) { - if (status) { - status.set = 'miss'; - status.maxEntrySizeExceeded = true; - } - // have to delete, in case something is there already. - this.delete(k); - return this; - } - let index = this.#size === 0 ? undefined : this.#keyMap.get(k); - if (index === undefined) { - // addition - index = (this.#size === 0 - ? this.#tail - : this.#free.length !== 0 - ? this.#free.pop() - : this.#size === this.#max - ? this.#evict(false) - : this.#size); - this.#keyList[index] = k; - this.#valList[index] = v; - this.#keyMap.set(k, index); - this.#next[this.#tail] = index; - this.#prev[index] = this.#tail; - this.#tail = index; - this.#size++; - this.#addItemSize(index, size, status); - if (status) - status.set = 'add'; - noUpdateTTL = false; - } - else { - // update - this.#moveToTail(index); - const oldVal = this.#valList[index]; - if (v !== oldVal) { - if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { - oldVal.__abortController.abort(new Error('replaced')); - const { __staleWhileFetching: s } = oldVal; - if (s !== undefined && !noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(s, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([s, k, 'set']); - } - } - } - else if (!noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(oldVal, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([oldVal, k, 'set']); - } - } - this.#removeItemSize(index); - this.#addItemSize(index, size, status); - this.#valList[index] = v; - if (status) { - status.set = 'replace'; - const oldValue = oldVal && this.#isBackgroundFetch(oldVal) - ? oldVal.__staleWhileFetching - : oldVal; - if (oldValue !== undefined) - status.oldValue = oldValue; - } - } - else if (status) { - status.set = 'update'; - } - } - if (ttl !== 0 && !this.#ttls) { - this.#initializeTTLTracking(); - } - if (this.#ttls) { - if (!noUpdateTTL) { - this.#setItemTTL(index, ttl, start); - } - if (status) - this.#statusTTL(status, index); - } - if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return this; - } - /** - * Evict the least recently used item, returning its value or - * `undefined` if cache is empty. 
- */ - pop() { - try { - while (this.#size) { - const val = this.#valList[this.#head]; - this.#evict(true); - if (this.#isBackgroundFetch(val)) { - if (val.__staleWhileFetching) { - return val.__staleWhileFetching; - } - } - else if (val !== undefined) { - return val; - } - } - } - finally { - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } - } - #evict(free) { - const head = this.#head; - const k = this.#keyList[head]; - const v = this.#valList[head]; - if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('evicted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'evict'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'evict']); - } - } - this.#removeItemSize(head); - // if we aren't about to use the index, then null these out - if (free) { - this.#keyList[head] = undefined; - this.#valList[head] = undefined; - this.#free.push(head); - } - if (this.#size === 1) { - this.#head = this.#tail = 0; - this.#free.length = 0; - } - else { - this.#head = this.#next[head]; - } - this.#keyMap.delete(k); - this.#size--; - return head; - } - /** - * Check if a key is in the cache, without updating the recency of use. - * Will return false if the item is stale, even though it is technically - * in the cache. - * - * Will not update item age unless - * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. - */ - has(k, hasOptions = {}) { - const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v) && - v.__staleWhileFetching === undefined) { - return false; - } - if (!this.#isStale(index)) { - if (updateAgeOnHas) { - this.#updateItemAge(index); - } - if (status) { - status.has = 'hit'; - this.#statusTTL(status, index); - } - return true; - } - else if (status) { - status.has = 'stale'; - this.#statusTTL(status, index); - } - } - else if (status) { - status.has = 'miss'; - } - return false; - } - /** - * Like {@link LRUCache#get} but doesn't update recency or delete stale - * items. - * - * Returns `undefined` if the item is stale, unless - * {@link LRUCache.OptionsBase.allowStale} is set. - */ - peek(k, peekOptions = {}) { - const { allowStale = this.allowStale } = peekOptions; - const index = this.#keyMap.get(k); - if (index !== undefined && - (allowStale || !this.#isStale(index))) { - const v = this.#valList[index]; - // either stale and allowed, or forcing a refresh of non-stale value - return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; - } - } - #backgroundFetch(k, index, options, context) { - const v = index === undefined ? undefined : this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - return v; - } - const ac = new AC(); - const { signal } = options; - // when/if our AC signals, then stop listening to theirs. 
- signal?.addEventListener('abort', () => ac.abort(signal.reason), { - signal: ac.signal, - }); - const fetchOpts = { - signal: ac.signal, - options, - context, - }; - const cb = (v, updateCache = false) => { - const { aborted } = ac.signal; - const ignoreAbort = options.ignoreFetchAbort && v !== undefined; - if (options.status) { - if (aborted && !updateCache) { - options.status.fetchAborted = true; - options.status.fetchError = ac.signal.reason; - if (ignoreAbort) - options.status.fetchAbortIgnored = true; - } - else { - options.status.fetchResolved = true; - } - } - if (aborted && !ignoreAbort && !updateCache) { - return fetchFail(ac.signal.reason); - } - // either we didn't abort, and are still here, or we did, and ignored - const bf = p; - if (this.#valList[index] === p) { - if (v === undefined) { - if (bf.__staleWhileFetching) { - this.#valList[index] = bf.__staleWhileFetching; - } - else { - this.delete(k); - } - } - else { - if (options.status) - options.status.fetchUpdated = true; - this.set(k, v, fetchOpts.options); - } - } - return v; - }; - const eb = (er) => { - if (options.status) { - options.status.fetchRejected = true; - options.status.fetchError = er; - } - return fetchFail(er); - }; - const fetchFail = (er) => { - const { aborted } = ac.signal; - const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; - const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; - const noDelete = allowStale || options.noDeleteOnFetchRejection; - const bf = p; - if (this.#valList[index] === p) { - // if we allow stale on fetch rejections, then we need to ensure that - // the stale value is not removed from the cache when the fetch fails. - const del = !noDelete || bf.__staleWhileFetching === undefined; - if (del) { - this.delete(k); - } - else if (!allowStaleAborted) { - // still replace the *promise* with the stale value, - // since we are done with the promise at this point. - // leave it untouched if we're still waiting for an - // aborted background fetch that hasn't yet returned. - this.#valList[index] = bf.__staleWhileFetching; - } - } - if (allowStale) { - if (options.status && bf.__staleWhileFetching !== undefined) { - options.status.returnedStale = true; - } - return bf.__staleWhileFetching; - } - else if (bf.__returned === bf) { - throw er; - } - }; - const pcall = (res, rej) => { - const fmp = this.#fetchMethod?.(k, v, fetchOpts); - if (fmp && fmp instanceof Promise) { - fmp.then(v => res(v === undefined ? undefined : v), rej); - } - // ignored, we go until we finish, regardless. - // defer check until we are actually aborting, - // so fetchMethod can override. - ac.signal.addEventListener('abort', () => { - if (!options.ignoreFetchAbort || - options.allowStaleOnFetchAbort) { - res(undefined); - // when it eventually resolves, update the cache. - if (options.allowStaleOnFetchAbort) { - res = v => cb(v, true); - } - } - }); - }; - if (options.status) - options.status.fetchDispatched = true; - const p = new Promise(pcall).then(cb, eb); - const bf = Object.assign(p, { - __abortController: ac, - __staleWhileFetching: v, - __returned: undefined, - }); - if (index === undefined) { - // internal, don't expose status. 
- this.set(k, bf, { ...fetchOpts.options, status: undefined }); - index = this.#keyMap.get(k); - } - else { - this.#valList[index] = bf; - } - return bf; - } - #isBackgroundFetch(p) { - if (!this.#hasFetchMethod) - return false; - const b = p; - return (!!b && - b instanceof Promise && - b.hasOwnProperty('__staleWhileFetching') && - b.__abortController instanceof AC); - } - async fetch(k, fetchOptions = {}) { - const { - // get options - allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, - // set options - ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, - // fetch exclusive options - noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; - if (!this.#hasFetchMethod) { - if (status) - status.fetch = 'get'; - return this.get(k, { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - status, - }); - } - const options = { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - ttl, - noDisposeOnSet, - size, - sizeCalculation, - noUpdateTTL, - noDeleteOnFetchRejection, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - status, - signal, - }; - let index = this.#keyMap.get(k); - if (index === undefined) { - if (status) - status.fetch = 'miss'; - const p = this.#backgroundFetch(k, index, options, context); - return (p.__returned = p); - } - else { - // in cache, maybe already fetching - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - const stale = allowStale && v.__staleWhileFetching !== undefined; - if (status) { - status.fetch = 'inflight'; - if (stale) - status.returnedStale = true; - } - return stale ? v.__staleWhileFetching : (v.__returned = v); - } - // if we force a refresh, that means do NOT serve the cached value, - // unless we are already in the process of refreshing the cache. - const isStale = this.#isStale(index); - if (!forceRefresh && !isStale) { - if (status) - status.fetch = 'hit'; - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - if (status) - this.#statusTTL(status, index); - return v; - } - // ok, it is stale or a forced refresh, and not already fetching. - // refresh the cache. - const p = this.#backgroundFetch(k, index, options, context); - const hasStale = p.__staleWhileFetching !== undefined; - const staleVal = hasStale && allowStale; - if (status) { - status.fetch = isStale ? 'stale' : 'refresh'; - if (staleVal && isStale) - status.returnedStale = true; - } - return staleVal ? p.__staleWhileFetching : (p.__returned = p); - } - } - /** - * Return a value from the cache. Will update the recency of the cache - * entry found. - * - * If the key is not found, get() will return `undefined`. 
- */ - get(k, getOptions = {}) { - const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const value = this.#valList[index]; - const fetching = this.#isBackgroundFetch(value); - if (status) - this.#statusTTL(status, index); - if (this.#isStale(index)) { - if (status) - status.get = 'stale'; - // delete only if not an in-flight background fetch - if (!fetching) { - if (!noDeleteOnStaleGet) { - this.delete(k); - } - if (status && allowStale) - status.returnedStale = true; - return allowStale ? value : undefined; - } - else { - if (status && - allowStale && - value.__staleWhileFetching !== undefined) { - status.returnedStale = true; - } - return allowStale ? value.__staleWhileFetching : undefined; - } - } - else { - if (status) - status.get = 'hit'; - // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching. - // If it's not stale, and fetching, AND has a __staleWhileFetching - // value, then that means the user fetched with {forceRefresh:true}, - // so it's safe to return that value. - if (fetching) { - return value.__staleWhileFetching; - } - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - return value; - } - } - else if (status) { - status.get = 'miss'; - } - } - #connect(p, n) { - this.#prev[n] = p; - this.#next[p] = n; - } - #moveToTail(index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.#tail) { - if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#connect(this.#prev[index], this.#next[index]); - } - this.#connect(this.#tail, index); - this.#tail = index; - } - } - /** - * Deletes a key out of the cache. - * Returns true if the key was deleted, false otherwise. - */ - delete(k) { - let deleted = false; - if (this.#size !== 0) { - const index = this.#keyMap.get(k); - if (index !== undefined) { - deleted = true; - if (this.#size === 1) { - this.clear(); - } - else { - this.#removeItemSize(index); - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'delete'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'delete']); - } - } - this.#keyMap.delete(k); - this.#keyList[index] = undefined; - this.#valList[index] = undefined; - if (index === this.#tail) { - this.#tail = this.#prev[index]; - } - else if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#next[this.#prev[index]] = this.#next[index]; - this.#prev[this.#next[index]] = this.#prev[index]; - } - this.#size--; - this.#free.push(index); - } - } - } - if (this.#hasDisposeAfter && this.#disposed?.length) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return deleted; - } - /** - * Clear the cache entirely, throwing away all values. 
- */ - clear() { - for (const index of this.#rindexes({ allowStale: true })) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else { - const k = this.#keyList[index]; - if (this.#hasDispose) { - this.#dispose?.(v, k, 'delete'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'delete']); - } - } - } - this.#keyMap.clear(); - this.#valList.fill(undefined); - this.#keyList.fill(undefined); - if (this.#ttls && this.#starts) { - this.#ttls.fill(0); - this.#starts.fill(0); - } - if (this.#sizes) { - this.#sizes.fill(0); - } - this.#head = 0; - this.#tail = 0; - this.#free.length = 0; - this.#calculatedSize = 0; - this.#size = 0; - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.min.js b/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.min.js deleted file mode 100644 index 5a16b3940d6df..0000000000000 --- a/node_modules/cacache/node_modules/lru-cache/dist/mjs/index.min.js +++ /dev/null @@ -1,2 +0,0 @@ -var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var R=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=C.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,R))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#W=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#R(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#W=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#R(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#W(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#W(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#R(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#R(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};export{R as LRUCache}; -//# sourceMappingURL=index.min.js.map diff --git a/node_modules/cacache/node_modules/lru-cache/dist/mjs/package.json b/node_modules/cacache/node_modules/lru-cache/dist/mjs/package.json deleted file mode 100644 index 3dbc1ca591c05..0000000000000 --- a/node_modules/cacache/node_modules/lru-cache/dist/mjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "module" -} diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.js deleted file mode 100644 index 02d76ec800a92..0000000000000 --- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.js +++ /dev/null @@ -1,1404 +0,0 @@ -"use strict"; -/** - * @module LRUCache - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.LRUCache = void 0; -const perf = typeof performance === 'object' && - performance && - typeof performance.now === 'function' - ? performance - : Date; -const warned = new Set(); -/* c8 ignore start */ -const PROCESS = (typeof process === 'object' && !!process ? process : {}); -/* c8 ignore start */ -const emitWarning = (msg, type, code, fn) => { - typeof PROCESS.emitWarning === 'function' - ? PROCESS.emitWarning(msg, type, code, fn) - : console.error(`[${code}] ${type}: ${msg}`); -}; -let AC = globalThis.AbortController; -let AS = globalThis.AbortSignal; -/* c8 ignore start */ -if (typeof AC === 'undefined') { - //@ts-ignore - AS = class AbortSignal { - onabort; - _onabort = []; - reason; - aborted = false; - addEventListener(_, fn) { - this._onabort.push(fn); - } - }; - //@ts-ignore - AC = class AbortController { - constructor() { - warnACPolyfill(); - } - signal = new AS(); - abort(reason) { - if (this.signal.aborted) - return; - //@ts-ignore - this.signal.reason = reason; - //@ts-ignore - this.signal.aborted = true; - //@ts-ignore - for (const fn of this.signal._onabort) { - fn(reason); - } - this.signal.onabort?.(reason); - } - }; - let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; - const warnACPolyfill = () => { - if (!printACPolyfillWarning) - return; - printACPolyfillWarning = false; - emitWarning('AbortController is not defined. If using lru-cache in ' + - 'node 14, load an AbortController polyfill from the ' + - '`node-abort-controller` package. A minimal polyfill is ' + - 'provided for use by LRUCache.fetch(), but it should not be ' + - 'relied upon in other contexts (eg, passing it to other APIs that ' + - 'use AbortController/AbortSignal might have undesirable effects). 
' + - 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); - }; -} -/* c8 ignore stop */ -const shouldWarn = (code) => !warned.has(code); -const TYPE = Symbol('type'); -const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); -/* c8 ignore start */ -// This is a little bit ridiculous, tbh. -// The maximum array length is 2^32-1 or thereabouts on most JS impls. -// And well before that point, you're caching the entire world, I mean, -// that's ~32GB of just integers for the next/prev links, plus whatever -// else to hold that many keys and values. Just filling the memory with -// zeroes at init time is brutal when you get that big. -// But why not be complete? -// Maybe in the future, these limits will have expanded. -const getUintArray = (max) => !isPosInt(max) - ? null - : max <= Math.pow(2, 8) - ? Uint8Array - : max <= Math.pow(2, 16) - ? Uint16Array - : max <= Math.pow(2, 32) - ? Uint32Array - : max <= Number.MAX_SAFE_INTEGER - ? ZeroArray - : null; -/* c8 ignore stop */ -class ZeroArray extends Array { - constructor(size) { - super(size); - this.fill(0); - } -} -class Stack { - heap; - length; - // private constructor - static #constructing = false; - static create(max) { - const HeapCls = getUintArray(max); - if (!HeapCls) - return []; - Stack.#constructing = true; - const s = new Stack(max, HeapCls); - Stack.#constructing = false; - return s; - } - constructor(max, HeapCls) { - /* c8 ignore start */ - if (!Stack.#constructing) { - throw new TypeError('instantiate Stack using Stack.create(n)'); - } - /* c8 ignore stop */ - this.heap = new HeapCls(max); - this.length = 0; - } - push(n) { - this.heap[this.length++] = n; - } - pop() { - return this.heap[--this.length]; - } -} -/** - * Default export, the thing you're using this module to get. - * - * All properties from the options object (with the exception of - * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as - * normal public members. (`max` and `maxBase` are read-only getters.) - * Changing any of these will alter the defaults for subsequent method calls, - * but is otherwise safe. - */ -class LRUCache { - // properties coming in from the options of these, only max and maxSize - // really *need* to be protected. The rest can be modified, as they just - // set defaults for various methods. 
- #max; - #maxSize; - #dispose; - #disposeAfter; - #fetchMethod; - /** - * {@link LRUCache.OptionsBase.ttl} - */ - ttl; - /** - * {@link LRUCache.OptionsBase.ttlResolution} - */ - ttlResolution; - /** - * {@link LRUCache.OptionsBase.ttlAutopurge} - */ - ttlAutopurge; - /** - * {@link LRUCache.OptionsBase.updateAgeOnGet} - */ - updateAgeOnGet; - /** - * {@link LRUCache.OptionsBase.updateAgeOnHas} - */ - updateAgeOnHas; - /** - * {@link LRUCache.OptionsBase.allowStale} - */ - allowStale; - /** - * {@link LRUCache.OptionsBase.noDisposeOnSet} - */ - noDisposeOnSet; - /** - * {@link LRUCache.OptionsBase.noUpdateTTL} - */ - noUpdateTTL; - /** - * {@link LRUCache.OptionsBase.maxEntrySize} - */ - maxEntrySize; - /** - * {@link LRUCache.OptionsBase.sizeCalculation} - */ - sizeCalculation; - /** - * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} - */ - noDeleteOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} - */ - noDeleteOnStaleGet; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} - */ - allowStaleOnFetchAbort; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} - */ - allowStaleOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.ignoreFetchAbort} - */ - ignoreFetchAbort; - // computed properties - #size; - #calculatedSize; - #keyMap; - #keyList; - #valList; - #next; - #prev; - #head; - #tail; - #free; - #disposed; - #sizes; - #starts; - #ttls; - #hasDispose; - #hasFetchMethod; - #hasDisposeAfter; - /** - * Do not call this method unless you need to inspect the - * inner workings of the cache. If anything returned by this - * object is modified in any way, strange breakage may occur. - * - * These fields are private for a reason! - * - * @internal - */ - static unsafeExposeInternals(c) { - return { - // properties - starts: c.#starts, - ttls: c.#ttls, - sizes: c.#sizes, - keyMap: c.#keyMap, - keyList: c.#keyList, - valList: c.#valList, - next: c.#next, - prev: c.#prev, - get head() { - return c.#head; - }, - get tail() { - return c.#tail; - }, - free: c.#free, - // methods - isBackgroundFetch: (p) => c.#isBackgroundFetch(p), - backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), - moveToTail: (index) => c.#moveToTail(index), - indexes: (options) => c.#indexes(options), - rindexes: (options) => c.#rindexes(options), - isStale: (index) => c.#isStale(index), - }; - } - // Protected read-only members - /** - * {@link LRUCache.OptionsBase.max} (read-only) - */ - get max() { - return this.#max; - } - /** - * {@link LRUCache.OptionsBase.maxSize} (read-only) - */ - get maxSize() { - return this.#maxSize; - } - /** - * The total computed size of items in the cache (read-only) - */ - get calculatedSize() { - return this.#calculatedSize; - } - /** - * The number of items stored in the cache (read-only) - */ - get size() { - return this.#size; - } - /** - * {@link LRUCache.OptionsBase.fetchMethod} (read-only) - */ - get fetchMethod() { - return this.#fetchMethod; - } - /** - * {@link LRUCache.OptionsBase.dispose} (read-only) - */ - get dispose() { - return this.#dispose; - } - /** - * {@link LRUCache.OptionsBase.disposeAfter} (read-only) - */ - get disposeAfter() { - return this.#disposeAfter; - } - constructor(options) { - const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, 
allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; - if (max !== 0 && !isPosInt(max)) { - throw new TypeError('max option must be a nonnegative integer'); - } - const UintArray = max ? getUintArray(max) : Array; - if (!UintArray) { - throw new Error('invalid max value: ' + max); - } - this.#max = max; - this.#maxSize = maxSize; - this.maxEntrySize = maxEntrySize || this.#maxSize; - this.sizeCalculation = sizeCalculation; - if (this.sizeCalculation) { - if (!this.#maxSize && !this.maxEntrySize) { - throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation set to non-function'); - } - } - if (fetchMethod !== undefined && - typeof fetchMethod !== 'function') { - throw new TypeError('fetchMethod must be a function if specified'); - } - this.#fetchMethod = fetchMethod; - this.#hasFetchMethod = !!fetchMethod; - this.#keyMap = new Map(); - this.#keyList = new Array(max).fill(undefined); - this.#valList = new Array(max).fill(undefined); - this.#next = new UintArray(max); - this.#prev = new UintArray(max); - this.#head = 0; - this.#tail = 0; - this.#free = Stack.create(max); - this.#size = 0; - this.#calculatedSize = 0; - if (typeof dispose === 'function') { - this.#dispose = dispose; - } - if (typeof disposeAfter === 'function') { - this.#disposeAfter = disposeAfter; - this.#disposed = []; - } - else { - this.#disposeAfter = undefined; - this.#disposed = undefined; - } - this.#hasDispose = !!this.#dispose; - this.#hasDisposeAfter = !!this.#disposeAfter; - this.noDisposeOnSet = !!noDisposeOnSet; - this.noUpdateTTL = !!noUpdateTTL; - this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; - this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; - this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; - this.ignoreFetchAbort = !!ignoreFetchAbort; - // NB: maxEntrySize is set to maxSize if it's set - if (this.maxEntrySize !== 0) { - if (this.#maxSize !== 0) { - if (!isPosInt(this.#maxSize)) { - throw new TypeError('maxSize must be a positive integer if specified'); - } - } - if (!isPosInt(this.maxEntrySize)) { - throw new TypeError('maxEntrySize must be a positive integer if specified'); - } - this.#initializeSizeTracking(); - } - this.allowStale = !!allowStale; - this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; - this.updateAgeOnGet = !!updateAgeOnGet; - this.updateAgeOnHas = !!updateAgeOnHas; - this.ttlResolution = - isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution - : 1; - this.ttlAutopurge = !!ttlAutopurge; - this.ttl = ttl || 0; - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError('ttl must be a positive integer if specified'); - } - this.#initializeTTLTracking(); - } - // do not allow completely unbounded caches - if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { - throw new TypeError('At least one of max, maxSize, or ttl is required'); - } - if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { - const code = 'LRU_CACHE_UNBOUNDED'; - if (shouldWarn(code)) { - warned.add(code); - const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + - 'result in unbounded memory consumption.'; - emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); - } - } - } - /** - * Return the remaining TTL time for a given entry key - */ - getRemainingTTL(key) { - return this.#keyMap.has(key) ? 
Infinity : 0; - } - #initializeTTLTracking() { - const ttls = new ZeroArray(this.#max); - const starts = new ZeroArray(this.#max); - this.#ttls = ttls; - this.#starts = starts; - this.#setItemTTL = (index, ttl, start = perf.now()) => { - starts[index] = ttl !== 0 ? start : 0; - ttls[index] = ttl; - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.#isStale(index)) { - this.delete(this.#keyList[index]); - } - }, ttl + 1); - // unref() not supported on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - }; - this.#updateItemAge = index => { - starts[index] = ttls[index] !== 0 ? perf.now() : 0; - }; - this.#statusTTL = (status, index) => { - if (ttls[index]) { - const ttl = ttls[index]; - const start = starts[index]; - status.ttl = ttl; - status.start = start; - status.now = cachedNow || getNow(); - const age = status.now - start; - status.remainingTTL = ttl - age; - } - }; - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0; - const getNow = () => { - const n = perf.now(); - if (this.ttlResolution > 0) { - cachedNow = n; - const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); - // not available on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - return n; - }; - this.getRemainingTTL = key => { - const index = this.#keyMap.get(key); - if (index === undefined) { - return 0; - } - const ttl = ttls[index]; - const start = starts[index]; - if (ttl === 0 || start === 0) { - return Infinity; - } - const age = (cachedNow || getNow()) - start; - return ttl - age; - }; - this.#isStale = index => { - return (ttls[index] !== 0 && - starts[index] !== 0 && - (cachedNow || getNow()) - starts[index] > ttls[index]); - }; - } - // conditionally set private methods related to TTL - #updateItemAge = () => { }; - #statusTTL = () => { }; - #setItemTTL = () => { }; - /* c8 ignore stop */ - #isStale = () => false; - #initializeSizeTracking() { - const sizes = new ZeroArray(this.#max); - this.#calculatedSize = 0; - this.#sizes = sizes; - this.#removeItemSize = index => { - this.#calculatedSize -= sizes[index]; - sizes[index] = 0; - }; - this.#requireSize = (k, v, size, sizeCalculation) => { - // provisionally accept background fetches. - // actual value size will be checked when they return. - if (this.#isBackgroundFetch(v)) { - return 0; - } - if (!isPosInt(size)) { - if (sizeCalculation) { - if (typeof sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation must be a function'); - } - size = sizeCalculation(v, k); - if (!isPosInt(size)) { - throw new TypeError('sizeCalculation return invalid (expect positive integer)'); - } - } - else { - throw new TypeError('invalid size value (must be positive integer). 
' + - 'When maxSize or maxEntrySize is used, sizeCalculation ' + - 'or size must be set.'); - } - } - return size; - }; - this.#addItemSize = (index, size, status) => { - sizes[index] = size; - if (this.#maxSize) { - const maxSize = this.#maxSize - sizes[index]; - while (this.#calculatedSize > maxSize) { - this.#evict(true); - } - } - this.#calculatedSize += sizes[index]; - if (status) { - status.entrySize = size; - status.totalCalculatedSize = this.#calculatedSize; - } - }; - } - #removeItemSize = _i => { }; - #addItemSize = (_i, _s, _st) => { }; - #requireSize = (_k, _v, size, sizeCalculation) => { - if (size || sizeCalculation) { - throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); - } - return 0; - }; - *#indexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#tail; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#head) { - break; - } - else { - i = this.#prev[i]; - } - } - } - } - *#rindexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#head; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#tail) { - break; - } - else { - i = this.#next[i]; - } - } - } - } - #isValidIndex(index) { - return (index !== undefined && - this.#keyMap.get(this.#keyList[index]) === index); - } - /** - * Return a generator yielding `[key, value]` pairs, - * in order from most recently used to least recently used. - */ - *entries() { - for (const i of this.#indexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Inverse order version of {@link LRUCache.entries} - * - * Return a generator yielding `[key, value]` pairs, - * in order from least recently used to most recently used. - */ - *rentries() { - for (const i of this.#rindexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Return a generator yielding the keys in the cache, - * in order from most recently used to least recently used. - */ - *keys() { - for (const i of this.#indexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Inverse order version of {@link LRUCache.keys} - * - * Return a generator yielding the keys in the cache, - * in order from least recently used to most recently used. - */ - *rkeys() { - for (const i of this.#rindexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Return a generator yielding the values in the cache, - * in order from most recently used to least recently used. - */ - *values() { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Inverse order version of {@link LRUCache.values} - * - * Return a generator yielding the values in the cache, - * in order from least recently used to most recently used. 
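// Sketch of the iteration order produced by the generators above: entries()/keys()/
// values() yield from most to least recently used, the r-prefixed variants reverse
// that, and Symbol.iterator is the same as entries(). Keys and values are assumptions.
const { LRUCache } = require('lru-cache')
const c = new LRUCache({ max: 3 })
c.set('a', 1)
c.set('b', 2)
c.set('c', 3)
c.get('a')                  // touching 'a' makes it the most recently used
console.log([...c.keys()])  // ['a', 'c', 'b']
console.log([...c.rkeys()]) // ['b', 'c', 'a']
for (const [key, value] of c) {
  console.log(key, value)   // most recently used first
}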
- */ - *rvalues() { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Iterating over the cache itself yields the same results as - * {@link LRUCache.entries} - */ - [Symbol.iterator]() { - return this.entries(); - } - /** - * Find a value for which the supplied fn method returns a truthy value, - * similar to Array.find(). fn is called as fn(value, key, cache). - */ - find(fn, getOptions = {}) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - if (fn(value, this.#keyList[i], this)) { - return this.get(this.#keyList[i], getOptions); - } - } - } - /** - * Call the supplied function on each item in the cache, in order from - * most recently used to least recently used. fn is called as - * fn(value, key, cache). Does not update age or recenty of use. - * Does not iterate over stale values. - */ - forEach(fn, thisp = this) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * The same as {@link LRUCache.forEach} but items are iterated over in - * reverse order. (ie, less recently used items are iterated over first.) - */ - rforEach(fn, thisp = this) { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * Delete any stale entries. Returns true if anything was removed, - * false otherwise. - */ - purgeStale() { - let deleted = false; - for (const i of this.#rindexes({ allowStale: true })) { - if (this.#isStale(i)) { - this.delete(this.#keyList[i]); - deleted = true; - } - } - return deleted; - } - /** - * Return an array of [key, {@link LRUCache.Entry}] tuples which can be - * passed to cache.load() - */ - dump() { - const arr = []; - for (const i of this.#indexes({ allowStale: true })) { - const key = this.#keyList[i]; - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined || key === undefined) - continue; - const entry = { value }; - if (this.#ttls && this.#starts) { - entry.ttl = this.#ttls[i]; - // always dump the start relative to a portable timestamp - // it's ok for this to be a bit slow, it's a rare operation. - const age = perf.now() - this.#starts[i]; - entry.start = Math.floor(Date.now() - age); - } - if (this.#sizes) { - entry.size = this.#sizes[i]; - } - arr.unshift([key, entry]); - } - return arr; - } - /** - * Reset the cache and load in the items in entries in the order listed. - * Note that the shape of the resulting cache may be different if the - * same options are not used in both caches. - */ - load(arr) { - this.clear(); - for (const [key, entry] of arr) { - if (entry.start) { - // entry.start is a portable timestamp, but we may be using - // node's performance.now(), so calculate the offset, so that - // we get the intended remaining TTL, no matter how long it's - // been on ice. - // - // it's ok for this to be a bit slow, it's a rare operation. 
- const age = Date.now() - entry.start; - entry.start = perf.now() - age; - } - this.set(key, entry.value, entry); - } - } - /** - * Add a value to the cache. - * - * Note: if `undefined` is specified as a value, this is an alias for - * {@link LRUCache#delete} - */ - set(k, v, setOptions = {}) { - if (v === undefined) { - this.delete(k); - return this; - } - const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; - let { noUpdateTTL = this.noUpdateTTL } = setOptions; - const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); - // if the item doesn't fit, don't do anything - // NB: maxEntrySize set to maxSize by default - if (this.maxEntrySize && size > this.maxEntrySize) { - if (status) { - status.set = 'miss'; - status.maxEntrySizeExceeded = true; - } - // have to delete, in case something is there already. - this.delete(k); - return this; - } - let index = this.#size === 0 ? undefined : this.#keyMap.get(k); - if (index === undefined) { - // addition - index = (this.#size === 0 - ? this.#tail - : this.#free.length !== 0 - ? this.#free.pop() - : this.#size === this.#max - ? this.#evict(false) - : this.#size); - this.#keyList[index] = k; - this.#valList[index] = v; - this.#keyMap.set(k, index); - this.#next[this.#tail] = index; - this.#prev[index] = this.#tail; - this.#tail = index; - this.#size++; - this.#addItemSize(index, size, status); - if (status) - status.set = 'add'; - noUpdateTTL = false; - } - else { - // update - this.#moveToTail(index); - const oldVal = this.#valList[index]; - if (v !== oldVal) { - if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { - oldVal.__abortController.abort(new Error('replaced')); - const { __staleWhileFetching: s } = oldVal; - if (s !== undefined && !noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(s, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([s, k, 'set']); - } - } - } - else if (!noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(oldVal, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([oldVal, k, 'set']); - } - } - this.#removeItemSize(index); - this.#addItemSize(index, size, status); - this.#valList[index] = v; - if (status) { - status.set = 'replace'; - const oldValue = oldVal && this.#isBackgroundFetch(oldVal) - ? oldVal.__staleWhileFetching - : oldVal; - if (oldValue !== undefined) - status.oldValue = oldValue; - } - } - else if (status) { - status.set = 'update'; - } - } - if (ttl !== 0 && !this.#ttls) { - this.#initializeTTLTracking(); - } - if (this.#ttls) { - if (!noUpdateTTL) { - this.#setItemTTL(index, ttl, start); - } - if (status) - this.#statusTTL(status, index); - } - if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return this; - } - /** - * Evict the least recently used item, returning its value or - * `undefined` if cache is empty. 
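// Sketch of the dump()/load() round trip implemented above: dump() emits entries
// LRU-first with TTL start times rebased onto Date.now(), and load() clears the
// target cache and replays them, undoing that rebasing. Keys/values are assumptions.
const { LRUCache } = require('lru-cache')
const src = new LRUCache({ max: 100, ttl: 60_000 })
src.set('greeting', 'hello')
const snapshot = src.dump()             // [[key, { value, ttl, start }], ...]
const copy = new LRUCache({ max: 100, ttl: 60_000 })
copy.load(snapshot)                     // replays entries, restoring recency order
console.log(copy.get('greeting'))       // 'hello', with roughly the remaining TTL kept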
- */ - pop() { - try { - while (this.#size) { - const val = this.#valList[this.#head]; - this.#evict(true); - if (this.#isBackgroundFetch(val)) { - if (val.__staleWhileFetching) { - return val.__staleWhileFetching; - } - } - else if (val !== undefined) { - return val; - } - } - } - finally { - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } - } - #evict(free) { - const head = this.#head; - const k = this.#keyList[head]; - const v = this.#valList[head]; - if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('evicted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'evict'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'evict']); - } - } - this.#removeItemSize(head); - // if we aren't about to use the index, then null these out - if (free) { - this.#keyList[head] = undefined; - this.#valList[head] = undefined; - this.#free.push(head); - } - if (this.#size === 1) { - this.#head = this.#tail = 0; - this.#free.length = 0; - } - else { - this.#head = this.#next[head]; - } - this.#keyMap.delete(k); - this.#size--; - return head; - } - /** - * Check if a key is in the cache, without updating the recency of use. - * Will return false if the item is stale, even though it is technically - * in the cache. - * - * Will not update item age unless - * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. - */ - has(k, hasOptions = {}) { - const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v) && - v.__staleWhileFetching === undefined) { - return false; - } - if (!this.#isStale(index)) { - if (updateAgeOnHas) { - this.#updateItemAge(index); - } - if (status) { - status.has = 'hit'; - this.#statusTTL(status, index); - } - return true; - } - else if (status) { - status.has = 'stale'; - this.#statusTTL(status, index); - } - } - else if (status) { - status.has = 'miss'; - } - return false; - } - /** - * Like {@link LRUCache#get} but doesn't update recency or delete stale - * items. - * - * Returns `undefined` if the item is stale, unless - * {@link LRUCache.OptionsBase.allowStale} is set. - */ - peek(k, peekOptions = {}) { - const { allowStale = this.allowStale } = peekOptions; - const index = this.#keyMap.get(k); - if (index !== undefined && - (allowStale || !this.#isStale(index))) { - const v = this.#valList[index]; - // either stale and allowed, or forcing a refresh of non-stale value - return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; - } - } - #backgroundFetch(k, index, options, context) { - const v = index === undefined ? undefined : this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - return v; - } - const ac = new AC(); - const { signal } = options; - // when/if our AC signals, then stop listening to theirs. 
- signal?.addEventListener('abort', () => ac.abort(signal.reason), { - signal: ac.signal, - }); - const fetchOpts = { - signal: ac.signal, - options, - context, - }; - const cb = (v, updateCache = false) => { - const { aborted } = ac.signal; - const ignoreAbort = options.ignoreFetchAbort && v !== undefined; - if (options.status) { - if (aborted && !updateCache) { - options.status.fetchAborted = true; - options.status.fetchError = ac.signal.reason; - if (ignoreAbort) - options.status.fetchAbortIgnored = true; - } - else { - options.status.fetchResolved = true; - } - } - if (aborted && !ignoreAbort && !updateCache) { - return fetchFail(ac.signal.reason); - } - // either we didn't abort, and are still here, or we did, and ignored - const bf = p; - if (this.#valList[index] === p) { - if (v === undefined) { - if (bf.__staleWhileFetching) { - this.#valList[index] = bf.__staleWhileFetching; - } - else { - this.delete(k); - } - } - else { - if (options.status) - options.status.fetchUpdated = true; - this.set(k, v, fetchOpts.options); - } - } - return v; - }; - const eb = (er) => { - if (options.status) { - options.status.fetchRejected = true; - options.status.fetchError = er; - } - return fetchFail(er); - }; - const fetchFail = (er) => { - const { aborted } = ac.signal; - const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; - const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; - const noDelete = allowStale || options.noDeleteOnFetchRejection; - const bf = p; - if (this.#valList[index] === p) { - // if we allow stale on fetch rejections, then we need to ensure that - // the stale value is not removed from the cache when the fetch fails. - const del = !noDelete || bf.__staleWhileFetching === undefined; - if (del) { - this.delete(k); - } - else if (!allowStaleAborted) { - // still replace the *promise* with the stale value, - // since we are done with the promise at this point. - // leave it untouched if we're still waiting for an - // aborted background fetch that hasn't yet returned. - this.#valList[index] = bf.__staleWhileFetching; - } - } - if (allowStale) { - if (options.status && bf.__staleWhileFetching !== undefined) { - options.status.returnedStale = true; - } - return bf.__staleWhileFetching; - } - else if (bf.__returned === bf) { - throw er; - } - }; - const pcall = (res, rej) => { - const fmp = this.#fetchMethod?.(k, v, fetchOpts); - if (fmp && fmp instanceof Promise) { - fmp.then(v => res(v === undefined ? undefined : v), rej); - } - // ignored, we go until we finish, regardless. - // defer check until we are actually aborting, - // so fetchMethod can override. - ac.signal.addEventListener('abort', () => { - if (!options.ignoreFetchAbort || - options.allowStaleOnFetchAbort) { - res(undefined); - // when it eventually resolves, update the cache. - if (options.allowStaleOnFetchAbort) { - res = v => cb(v, true); - } - } - }); - }; - if (options.status) - options.status.fetchDispatched = true; - const p = new Promise(pcall).then(cb, eb); - const bf = Object.assign(p, { - __abortController: ac, - __staleWhileFetching: v, - __returned: undefined, - }); - if (index === undefined) { - // internal, don't expose status. 
- this.set(k, bf, { ...fetchOpts.options, status: undefined }); - index = this.#keyMap.get(k); - } - else { - this.#valList[index] = bf; - } - return bf; - } - #isBackgroundFetch(p) { - if (!this.#hasFetchMethod) - return false; - const b = p; - return (!!b && - b instanceof Promise && - b.hasOwnProperty('__staleWhileFetching') && - b.__abortController instanceof AC); - } - async fetch(k, fetchOptions = {}) { - const { - // get options - allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, - // set options - ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, - // fetch exclusive options - noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; - if (!this.#hasFetchMethod) { - if (status) - status.fetch = 'get'; - return this.get(k, { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - status, - }); - } - const options = { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - ttl, - noDisposeOnSet, - size, - sizeCalculation, - noUpdateTTL, - noDeleteOnFetchRejection, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - status, - signal, - }; - let index = this.#keyMap.get(k); - if (index === undefined) { - if (status) - status.fetch = 'miss'; - const p = this.#backgroundFetch(k, index, options, context); - return (p.__returned = p); - } - else { - // in cache, maybe already fetching - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - const stale = allowStale && v.__staleWhileFetching !== undefined; - if (status) { - status.fetch = 'inflight'; - if (stale) - status.returnedStale = true; - } - return stale ? v.__staleWhileFetching : (v.__returned = v); - } - // if we force a refresh, that means do NOT serve the cached value, - // unless we are already in the process of refreshing the cache. - const isStale = this.#isStale(index); - if (!forceRefresh && !isStale) { - if (status) - status.fetch = 'hit'; - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - if (status) - this.#statusTTL(status, index); - return v; - } - // ok, it is stale or a forced refresh, and not already fetching. - // refresh the cache. - const p = this.#backgroundFetch(k, index, options, context); - const hasStale = p.__staleWhileFetching !== undefined; - const staleVal = hasStale && allowStale; - if (status) { - status.fetch = isStale ? 'stale' : 'refresh'; - if (staleVal && isStale) - status.returnedStale = true; - } - return staleVal ? p.__staleWhileFetching : (p.__returned = p); - } - } - /** - * Return a value from the cache. Will update the recency of the cache - * entry found. - * - * If the key is not found, get() will return `undefined`. 
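// Sketch of the fetch() path implemented above: with a fetchMethod configured,
// cache.fetch(key) coalesces concurrent lookups of the same key into one in-flight
// promise and, with allowStale, can serve the stale value while revalidating. The
// URL, the Node 18+ global fetch(), and the JSON payload are assumptions.
const { LRUCache } = require('lru-cache')
const responses = new LRUCache({
  max: 100,
  ttl: 30_000,
  allowStale: true, // lets fetch() hand back the stale value while a refresh is pending
  fetchMethod: async (key, staleValue, { signal, options, context }) => {
    // signal aborts if the entry is evicted or replaced while the request is in flight
    const res = await fetch(`https://registry.example/${encodeURIComponent(key)}`, { signal })
    return res.json()
  },
})
async function lookup(name) {
  // concurrent callers asking for the same key share a single underlying request
  return responses.fetch(name)
}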
- */ - get(k, getOptions = {}) { - const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const value = this.#valList[index]; - const fetching = this.#isBackgroundFetch(value); - if (status) - this.#statusTTL(status, index); - if (this.#isStale(index)) { - if (status) - status.get = 'stale'; - // delete only if not an in-flight background fetch - if (!fetching) { - if (!noDeleteOnStaleGet) { - this.delete(k); - } - if (status && allowStale) - status.returnedStale = true; - return allowStale ? value : undefined; - } - else { - if (status && - allowStale && - value.__staleWhileFetching !== undefined) { - status.returnedStale = true; - } - return allowStale ? value.__staleWhileFetching : undefined; - } - } - else { - if (status) - status.get = 'hit'; - // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching. - // If it's not stale, and fetching, AND has a __staleWhileFetching - // value, then that means the user fetched with {forceRefresh:true}, - // so it's safe to return that value. - if (fetching) { - return value.__staleWhileFetching; - } - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - return value; - } - } - else if (status) { - status.get = 'miss'; - } - } - #connect(p, n) { - this.#prev[n] = p; - this.#next[p] = n; - } - #moveToTail(index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.#tail) { - if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#connect(this.#prev[index], this.#next[index]); - } - this.#connect(this.#tail, index); - this.#tail = index; - } - } - /** - * Deletes a key out of the cache. - * Returns true if the key was deleted, false otherwise. - */ - delete(k) { - let deleted = false; - if (this.#size !== 0) { - const index = this.#keyMap.get(k); - if (index !== undefined) { - deleted = true; - if (this.#size === 1) { - this.clear(); - } - else { - this.#removeItemSize(index); - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'delete'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'delete']); - } - } - this.#keyMap.delete(k); - this.#keyList[index] = undefined; - this.#valList[index] = undefined; - if (index === this.#tail) { - this.#tail = this.#prev[index]; - } - else if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#next[this.#prev[index]] = this.#next[index]; - this.#prev[this.#next[index]] = this.#prev[index]; - } - this.#size--; - this.#free.push(index); - } - } - } - if (this.#hasDisposeAfter && this.#disposed?.length) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return deleted; - } - /** - * Clear the cache entirely, throwing away all values. 
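// Sketch of the read/delete paths above: get() bumps recency and (by default) drops
// stale entries, has() and peek() leave recency untouched, and delete()/clear() run
// any configured dispose handlers. Keys and values here are assumptions.
const { LRUCache } = require('lru-cache')
const c = new LRUCache({ max: 2 })
c.set('x', 1)
c.set('y', 2)
c.has('x')    // true — does not bump recency
c.peek('x')   // 1    — also leaves recency alone
c.get('x')    // 1    — marks 'x' most recently used
c.set('z', 3) // evicts 'y', now the least recently used
c.delete('x') // true
c.clear()     // drops everything that remains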
- */ - clear() { - for (const index of this.#rindexes({ allowStale: true })) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else { - const k = this.#keyList[index]; - if (this.#hasDispose) { - this.#dispose?.(v, k, 'delete'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'delete']); - } - } - } - this.#keyMap.clear(); - this.#valList.fill(undefined); - this.#keyList.fill(undefined); - if (this.#ttls && this.#starts) { - this.#ttls.fill(0); - this.#starts.fill(0); - } - if (this.#sizes) { - this.#sizes.fill(0); - } - this.#head = 0; - this.#tail = 0; - this.#free.length = 0; - this.#calculatedSize = 0; - this.#size = 0; - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } -} -exports.LRUCache = LRUCache; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.min.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.min.js deleted file mode 100644 index 8d34a03041d25..0000000000000 --- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/index.min.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=R.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#W(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};exports.LRUCache=C; -//# sourceMappingURL=index.min.js.map diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/package.json b/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/package.json deleted file mode 100644 index 5bbefffbabee3..0000000000000 --- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/cjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "commonjs" -} diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.js deleted file mode 100644 index 23b9754ad6c76..0000000000000 --- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.js +++ /dev/null @@ -1,1400 +0,0 @@ -/** - * @module LRUCache - */ -const perf = typeof performance === 'object' && - performance && - typeof performance.now === 'function' - ? performance - : Date; -const warned = new Set(); -/* c8 ignore start */ -const PROCESS = (typeof process === 'object' && !!process ? process : {}); -/* c8 ignore start */ -const emitWarning = (msg, type, code, fn) => { - typeof PROCESS.emitWarning === 'function' - ? PROCESS.emitWarning(msg, type, code, fn) - : console.error(`[${code}] ${type}: ${msg}`); -}; -let AC = globalThis.AbortController; -let AS = globalThis.AbortSignal; -/* c8 ignore start */ -if (typeof AC === 'undefined') { - //@ts-ignore - AS = class AbortSignal { - onabort; - _onabort = []; - reason; - aborted = false; - addEventListener(_, fn) { - this._onabort.push(fn); - } - }; - //@ts-ignore - AC = class AbortController { - constructor() { - warnACPolyfill(); - } - signal = new AS(); - abort(reason) { - if (this.signal.aborted) - return; - //@ts-ignore - this.signal.reason = reason; - //@ts-ignore - this.signal.aborted = true; - //@ts-ignore - for (const fn of this.signal._onabort) { - fn(reason); - } - this.signal.onabort?.(reason); - } - }; - let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; - const warnACPolyfill = () => { - if (!printACPolyfillWarning) - return; - printACPolyfillWarning = false; - emitWarning('AbortController is not defined. If using lru-cache in ' + - 'node 14, load an AbortController polyfill from the ' + - '`node-abort-controller` package. A minimal polyfill is ' + - 'provided for use by LRUCache.fetch(), but it should not be ' + - 'relied upon in other contexts (eg, passing it to other APIs that ' + - 'use AbortController/AbortSignal might have undesirable effects). 
' + - 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); - }; -} -/* c8 ignore stop */ -const shouldWarn = (code) => !warned.has(code); -const TYPE = Symbol('type'); -const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); -/* c8 ignore start */ -// This is a little bit ridiculous, tbh. -// The maximum array length is 2^32-1 or thereabouts on most JS impls. -// And well before that point, you're caching the entire world, I mean, -// that's ~32GB of just integers for the next/prev links, plus whatever -// else to hold that many keys and values. Just filling the memory with -// zeroes at init time is brutal when you get that big. -// But why not be complete? -// Maybe in the future, these limits will have expanded. -const getUintArray = (max) => !isPosInt(max) - ? null - : max <= Math.pow(2, 8) - ? Uint8Array - : max <= Math.pow(2, 16) - ? Uint16Array - : max <= Math.pow(2, 32) - ? Uint32Array - : max <= Number.MAX_SAFE_INTEGER - ? ZeroArray - : null; -/* c8 ignore stop */ -class ZeroArray extends Array { - constructor(size) { - super(size); - this.fill(0); - } -} -class Stack { - heap; - length; - // private constructor - static #constructing = false; - static create(max) { - const HeapCls = getUintArray(max); - if (!HeapCls) - return []; - Stack.#constructing = true; - const s = new Stack(max, HeapCls); - Stack.#constructing = false; - return s; - } - constructor(max, HeapCls) { - /* c8 ignore start */ - if (!Stack.#constructing) { - throw new TypeError('instantiate Stack using Stack.create(n)'); - } - /* c8 ignore stop */ - this.heap = new HeapCls(max); - this.length = 0; - } - push(n) { - this.heap[this.length++] = n; - } - pop() { - return this.heap[--this.length]; - } -} -/** - * Default export, the thing you're using this module to get. - * - * All properties from the options object (with the exception of - * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as - * normal public members. (`max` and `maxBase` are read-only getters.) - * Changing any of these will alter the defaults for subsequent method calls, - * but is otherwise safe. - */ -export class LRUCache { - // properties coming in from the options of these, only max and maxSize - // really *need* to be protected. The rest can be modified, as they just - // set defaults for various methods. 
- #max; - #maxSize; - #dispose; - #disposeAfter; - #fetchMethod; - /** - * {@link LRUCache.OptionsBase.ttl} - */ - ttl; - /** - * {@link LRUCache.OptionsBase.ttlResolution} - */ - ttlResolution; - /** - * {@link LRUCache.OptionsBase.ttlAutopurge} - */ - ttlAutopurge; - /** - * {@link LRUCache.OptionsBase.updateAgeOnGet} - */ - updateAgeOnGet; - /** - * {@link LRUCache.OptionsBase.updateAgeOnHas} - */ - updateAgeOnHas; - /** - * {@link LRUCache.OptionsBase.allowStale} - */ - allowStale; - /** - * {@link LRUCache.OptionsBase.noDisposeOnSet} - */ - noDisposeOnSet; - /** - * {@link LRUCache.OptionsBase.noUpdateTTL} - */ - noUpdateTTL; - /** - * {@link LRUCache.OptionsBase.maxEntrySize} - */ - maxEntrySize; - /** - * {@link LRUCache.OptionsBase.sizeCalculation} - */ - sizeCalculation; - /** - * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} - */ - noDeleteOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} - */ - noDeleteOnStaleGet; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} - */ - allowStaleOnFetchAbort; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} - */ - allowStaleOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.ignoreFetchAbort} - */ - ignoreFetchAbort; - // computed properties - #size; - #calculatedSize; - #keyMap; - #keyList; - #valList; - #next; - #prev; - #head; - #tail; - #free; - #disposed; - #sizes; - #starts; - #ttls; - #hasDispose; - #hasFetchMethod; - #hasDisposeAfter; - /** - * Do not call this method unless you need to inspect the - * inner workings of the cache. If anything returned by this - * object is modified in any way, strange breakage may occur. - * - * These fields are private for a reason! - * - * @internal - */ - static unsafeExposeInternals(c) { - return { - // properties - starts: c.#starts, - ttls: c.#ttls, - sizes: c.#sizes, - keyMap: c.#keyMap, - keyList: c.#keyList, - valList: c.#valList, - next: c.#next, - prev: c.#prev, - get head() { - return c.#head; - }, - get tail() { - return c.#tail; - }, - free: c.#free, - // methods - isBackgroundFetch: (p) => c.#isBackgroundFetch(p), - backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), - moveToTail: (index) => c.#moveToTail(index), - indexes: (options) => c.#indexes(options), - rindexes: (options) => c.#rindexes(options), - isStale: (index) => c.#isStale(index), - }; - } - // Protected read-only members - /** - * {@link LRUCache.OptionsBase.max} (read-only) - */ - get max() { - return this.#max; - } - /** - * {@link LRUCache.OptionsBase.maxSize} (read-only) - */ - get maxSize() { - return this.#maxSize; - } - /** - * The total computed size of items in the cache (read-only) - */ - get calculatedSize() { - return this.#calculatedSize; - } - /** - * The number of items stored in the cache (read-only) - */ - get size() { - return this.#size; - } - /** - * {@link LRUCache.OptionsBase.fetchMethod} (read-only) - */ - get fetchMethod() { - return this.#fetchMethod; - } - /** - * {@link LRUCache.OptionsBase.dispose} (read-only) - */ - get dispose() { - return this.#dispose; - } - /** - * {@link LRUCache.OptionsBase.disposeAfter} (read-only) - */ - get disposeAfter() { - return this.#disposeAfter; - } - constructor(options) { - const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, 
allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; - if (max !== 0 && !isPosInt(max)) { - throw new TypeError('max option must be a nonnegative integer'); - } - const UintArray = max ? getUintArray(max) : Array; - if (!UintArray) { - throw new Error('invalid max value: ' + max); - } - this.#max = max; - this.#maxSize = maxSize; - this.maxEntrySize = maxEntrySize || this.#maxSize; - this.sizeCalculation = sizeCalculation; - if (this.sizeCalculation) { - if (!this.#maxSize && !this.maxEntrySize) { - throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation set to non-function'); - } - } - if (fetchMethod !== undefined && - typeof fetchMethod !== 'function') { - throw new TypeError('fetchMethod must be a function if specified'); - } - this.#fetchMethod = fetchMethod; - this.#hasFetchMethod = !!fetchMethod; - this.#keyMap = new Map(); - this.#keyList = new Array(max).fill(undefined); - this.#valList = new Array(max).fill(undefined); - this.#next = new UintArray(max); - this.#prev = new UintArray(max); - this.#head = 0; - this.#tail = 0; - this.#free = Stack.create(max); - this.#size = 0; - this.#calculatedSize = 0; - if (typeof dispose === 'function') { - this.#dispose = dispose; - } - if (typeof disposeAfter === 'function') { - this.#disposeAfter = disposeAfter; - this.#disposed = []; - } - else { - this.#disposeAfter = undefined; - this.#disposed = undefined; - } - this.#hasDispose = !!this.#dispose; - this.#hasDisposeAfter = !!this.#disposeAfter; - this.noDisposeOnSet = !!noDisposeOnSet; - this.noUpdateTTL = !!noUpdateTTL; - this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; - this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; - this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; - this.ignoreFetchAbort = !!ignoreFetchAbort; - // NB: maxEntrySize is set to maxSize if it's set - if (this.maxEntrySize !== 0) { - if (this.#maxSize !== 0) { - if (!isPosInt(this.#maxSize)) { - throw new TypeError('maxSize must be a positive integer if specified'); - } - } - if (!isPosInt(this.maxEntrySize)) { - throw new TypeError('maxEntrySize must be a positive integer if specified'); - } - this.#initializeSizeTracking(); - } - this.allowStale = !!allowStale; - this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; - this.updateAgeOnGet = !!updateAgeOnGet; - this.updateAgeOnHas = !!updateAgeOnHas; - this.ttlResolution = - isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution - : 1; - this.ttlAutopurge = !!ttlAutopurge; - this.ttl = ttl || 0; - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError('ttl must be a positive integer if specified'); - } - this.#initializeTTLTracking(); - } - // do not allow completely unbounded caches - if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { - throw new TypeError('At least one of max, maxSize, or ttl is required'); - } - if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { - const code = 'LRU_CACHE_UNBOUNDED'; - if (shouldWarn(code)) { - warned.add(code); - const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + - 'result in unbounded memory consumption.'; - emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); - } - } - } - /** - * Return the remaining TTL time for a given entry key - */ - getRemainingTTL(key) { - return this.#keyMap.has(key) ? 
Infinity : 0; - } - #initializeTTLTracking() { - const ttls = new ZeroArray(this.#max); - const starts = new ZeroArray(this.#max); - this.#ttls = ttls; - this.#starts = starts; - this.#setItemTTL = (index, ttl, start = perf.now()) => { - starts[index] = ttl !== 0 ? start : 0; - ttls[index] = ttl; - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.#isStale(index)) { - this.delete(this.#keyList[index]); - } - }, ttl + 1); - // unref() not supported on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - }; - this.#updateItemAge = index => { - starts[index] = ttls[index] !== 0 ? perf.now() : 0; - }; - this.#statusTTL = (status, index) => { - if (ttls[index]) { - const ttl = ttls[index]; - const start = starts[index]; - status.ttl = ttl; - status.start = start; - status.now = cachedNow || getNow(); - const age = status.now - start; - status.remainingTTL = ttl - age; - } - }; - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0; - const getNow = () => { - const n = perf.now(); - if (this.ttlResolution > 0) { - cachedNow = n; - const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); - // not available on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - return n; - }; - this.getRemainingTTL = key => { - const index = this.#keyMap.get(key); - if (index === undefined) { - return 0; - } - const ttl = ttls[index]; - const start = starts[index]; - if (ttl === 0 || start === 0) { - return Infinity; - } - const age = (cachedNow || getNow()) - start; - return ttl - age; - }; - this.#isStale = index => { - return (ttls[index] !== 0 && - starts[index] !== 0 && - (cachedNow || getNow()) - starts[index] > ttls[index]); - }; - } - // conditionally set private methods related to TTL - #updateItemAge = () => { }; - #statusTTL = () => { }; - #setItemTTL = () => { }; - /* c8 ignore stop */ - #isStale = () => false; - #initializeSizeTracking() { - const sizes = new ZeroArray(this.#max); - this.#calculatedSize = 0; - this.#sizes = sizes; - this.#removeItemSize = index => { - this.#calculatedSize -= sizes[index]; - sizes[index] = 0; - }; - this.#requireSize = (k, v, size, sizeCalculation) => { - // provisionally accept background fetches. - // actual value size will be checked when they return. - if (this.#isBackgroundFetch(v)) { - return 0; - } - if (!isPosInt(size)) { - if (sizeCalculation) { - if (typeof sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation must be a function'); - } - size = sizeCalculation(v, k); - if (!isPosInt(size)) { - throw new TypeError('sizeCalculation return invalid (expect positive integer)'); - } - } - else { - throw new TypeError('invalid size value (must be positive integer). 
' + - 'When maxSize or maxEntrySize is used, sizeCalculation ' + - 'or size must be set.'); - } - } - return size; - }; - this.#addItemSize = (index, size, status) => { - sizes[index] = size; - if (this.#maxSize) { - const maxSize = this.#maxSize - sizes[index]; - while (this.#calculatedSize > maxSize) { - this.#evict(true); - } - } - this.#calculatedSize += sizes[index]; - if (status) { - status.entrySize = size; - status.totalCalculatedSize = this.#calculatedSize; - } - }; - } - #removeItemSize = _i => { }; - #addItemSize = (_i, _s, _st) => { }; - #requireSize = (_k, _v, size, sizeCalculation) => { - if (size || sizeCalculation) { - throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); - } - return 0; - }; - *#indexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#tail; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#head) { - break; - } - else { - i = this.#prev[i]; - } - } - } - } - *#rindexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#head; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#tail) { - break; - } - else { - i = this.#next[i]; - } - } - } - } - #isValidIndex(index) { - return (index !== undefined && - this.#keyMap.get(this.#keyList[index]) === index); - } - /** - * Return a generator yielding `[key, value]` pairs, - * in order from most recently used to least recently used. - */ - *entries() { - for (const i of this.#indexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Inverse order version of {@link LRUCache.entries} - * - * Return a generator yielding `[key, value]` pairs, - * in order from least recently used to most recently used. - */ - *rentries() { - for (const i of this.#rindexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Return a generator yielding the keys in the cache, - * in order from most recently used to least recently used. - */ - *keys() { - for (const i of this.#indexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Inverse order version of {@link LRUCache.keys} - * - * Return a generator yielding the keys in the cache, - * in order from least recently used to most recently used. - */ - *rkeys() { - for (const i of this.#rindexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Return a generator yielding the values in the cache, - * in order from most recently used to least recently used. - */ - *values() { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Inverse order version of {@link LRUCache.values} - * - * Return a generator yielding the values in the cache, - * in order from least recently used to most recently used. 
- */ - *rvalues() { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Iterating over the cache itself yields the same results as - * {@link LRUCache.entries} - */ - [Symbol.iterator]() { - return this.entries(); - } - /** - * Find a value for which the supplied fn method returns a truthy value, - * similar to Array.find(). fn is called as fn(value, key, cache). - */ - find(fn, getOptions = {}) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - if (fn(value, this.#keyList[i], this)) { - return this.get(this.#keyList[i], getOptions); - } - } - } - /** - * Call the supplied function on each item in the cache, in order from - * most recently used to least recently used. fn is called as - * fn(value, key, cache). Does not update age or recenty of use. - * Does not iterate over stale values. - */ - forEach(fn, thisp = this) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * The same as {@link LRUCache.forEach} but items are iterated over in - * reverse order. (ie, less recently used items are iterated over first.) - */ - rforEach(fn, thisp = this) { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * Delete any stale entries. Returns true if anything was removed, - * false otherwise. - */ - purgeStale() { - let deleted = false; - for (const i of this.#rindexes({ allowStale: true })) { - if (this.#isStale(i)) { - this.delete(this.#keyList[i]); - deleted = true; - } - } - return deleted; - } - /** - * Return an array of [key, {@link LRUCache.Entry}] tuples which can be - * passed to cache.load() - */ - dump() { - const arr = []; - for (const i of this.#indexes({ allowStale: true })) { - const key = this.#keyList[i]; - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined || key === undefined) - continue; - const entry = { value }; - if (this.#ttls && this.#starts) { - entry.ttl = this.#ttls[i]; - // always dump the start relative to a portable timestamp - // it's ok for this to be a bit slow, it's a rare operation. - const age = perf.now() - this.#starts[i]; - entry.start = Math.floor(Date.now() - age); - } - if (this.#sizes) { - entry.size = this.#sizes[i]; - } - arr.unshift([key, entry]); - } - return arr; - } - /** - * Reset the cache and load in the items in entries in the order listed. - * Note that the shape of the resulting cache may be different if the - * same options are not used in both caches. - */ - load(arr) { - this.clear(); - for (const [key, entry] of arr) { - if (entry.start) { - // entry.start is a portable timestamp, but we may be using - // node's performance.now(), so calculate the offset, so that - // we get the intended remaining TTL, no matter how long it's - // been on ice. - // - // it's ok for this to be a bit slow, it's a rare operation. 
- const age = Date.now() - entry.start; - entry.start = perf.now() - age; - } - this.set(key, entry.value, entry); - } - } - /** - * Add a value to the cache. - * - * Note: if `undefined` is specified as a value, this is an alias for - * {@link LRUCache#delete} - */ - set(k, v, setOptions = {}) { - if (v === undefined) { - this.delete(k); - return this; - } - const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; - let { noUpdateTTL = this.noUpdateTTL } = setOptions; - const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); - // if the item doesn't fit, don't do anything - // NB: maxEntrySize set to maxSize by default - if (this.maxEntrySize && size > this.maxEntrySize) { - if (status) { - status.set = 'miss'; - status.maxEntrySizeExceeded = true; - } - // have to delete, in case something is there already. - this.delete(k); - return this; - } - let index = this.#size === 0 ? undefined : this.#keyMap.get(k); - if (index === undefined) { - // addition - index = (this.#size === 0 - ? this.#tail - : this.#free.length !== 0 - ? this.#free.pop() - : this.#size === this.#max - ? this.#evict(false) - : this.#size); - this.#keyList[index] = k; - this.#valList[index] = v; - this.#keyMap.set(k, index); - this.#next[this.#tail] = index; - this.#prev[index] = this.#tail; - this.#tail = index; - this.#size++; - this.#addItemSize(index, size, status); - if (status) - status.set = 'add'; - noUpdateTTL = false; - } - else { - // update - this.#moveToTail(index); - const oldVal = this.#valList[index]; - if (v !== oldVal) { - if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { - oldVal.__abortController.abort(new Error('replaced')); - const { __staleWhileFetching: s } = oldVal; - if (s !== undefined && !noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(s, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([s, k, 'set']); - } - } - } - else if (!noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(oldVal, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([oldVal, k, 'set']); - } - } - this.#removeItemSize(index); - this.#addItemSize(index, size, status); - this.#valList[index] = v; - if (status) { - status.set = 'replace'; - const oldValue = oldVal && this.#isBackgroundFetch(oldVal) - ? oldVal.__staleWhileFetching - : oldVal; - if (oldValue !== undefined) - status.oldValue = oldValue; - } - } - else if (status) { - status.set = 'update'; - } - } - if (ttl !== 0 && !this.#ttls) { - this.#initializeTTLTracking(); - } - if (this.#ttls) { - if (!noUpdateTTL) { - this.#setItemTTL(index, ttl, start); - } - if (status) - this.#statusTTL(status, index); - } - if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return this; - } - /** - * Evict the least recently used item, returning its value or - * `undefined` if cache is empty. 
- */ - pop() { - try { - while (this.#size) { - const val = this.#valList[this.#head]; - this.#evict(true); - if (this.#isBackgroundFetch(val)) { - if (val.__staleWhileFetching) { - return val.__staleWhileFetching; - } - } - else if (val !== undefined) { - return val; - } - } - } - finally { - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } - } - #evict(free) { - const head = this.#head; - const k = this.#keyList[head]; - const v = this.#valList[head]; - if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('evicted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'evict'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'evict']); - } - } - this.#removeItemSize(head); - // if we aren't about to use the index, then null these out - if (free) { - this.#keyList[head] = undefined; - this.#valList[head] = undefined; - this.#free.push(head); - } - if (this.#size === 1) { - this.#head = this.#tail = 0; - this.#free.length = 0; - } - else { - this.#head = this.#next[head]; - } - this.#keyMap.delete(k); - this.#size--; - return head; - } - /** - * Check if a key is in the cache, without updating the recency of use. - * Will return false if the item is stale, even though it is technically - * in the cache. - * - * Will not update item age unless - * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. - */ - has(k, hasOptions = {}) { - const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v) && - v.__staleWhileFetching === undefined) { - return false; - } - if (!this.#isStale(index)) { - if (updateAgeOnHas) { - this.#updateItemAge(index); - } - if (status) { - status.has = 'hit'; - this.#statusTTL(status, index); - } - return true; - } - else if (status) { - status.has = 'stale'; - this.#statusTTL(status, index); - } - } - else if (status) { - status.has = 'miss'; - } - return false; - } - /** - * Like {@link LRUCache#get} but doesn't update recency or delete stale - * items. - * - * Returns `undefined` if the item is stale, unless - * {@link LRUCache.OptionsBase.allowStale} is set. - */ - peek(k, peekOptions = {}) { - const { allowStale = this.allowStale } = peekOptions; - const index = this.#keyMap.get(k); - if (index !== undefined && - (allowStale || !this.#isStale(index))) { - const v = this.#valList[index]; - // either stale and allowed, or forcing a refresh of non-stale value - return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; - } - } - #backgroundFetch(k, index, options, context) { - const v = index === undefined ? undefined : this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - return v; - } - const ac = new AC(); - const { signal } = options; - // when/if our AC signals, then stop listening to theirs. 
- signal?.addEventListener('abort', () => ac.abort(signal.reason), { - signal: ac.signal, - }); - const fetchOpts = { - signal: ac.signal, - options, - context, - }; - const cb = (v, updateCache = false) => { - const { aborted } = ac.signal; - const ignoreAbort = options.ignoreFetchAbort && v !== undefined; - if (options.status) { - if (aborted && !updateCache) { - options.status.fetchAborted = true; - options.status.fetchError = ac.signal.reason; - if (ignoreAbort) - options.status.fetchAbortIgnored = true; - } - else { - options.status.fetchResolved = true; - } - } - if (aborted && !ignoreAbort && !updateCache) { - return fetchFail(ac.signal.reason); - } - // either we didn't abort, and are still here, or we did, and ignored - const bf = p; - if (this.#valList[index] === p) { - if (v === undefined) { - if (bf.__staleWhileFetching) { - this.#valList[index] = bf.__staleWhileFetching; - } - else { - this.delete(k); - } - } - else { - if (options.status) - options.status.fetchUpdated = true; - this.set(k, v, fetchOpts.options); - } - } - return v; - }; - const eb = (er) => { - if (options.status) { - options.status.fetchRejected = true; - options.status.fetchError = er; - } - return fetchFail(er); - }; - const fetchFail = (er) => { - const { aborted } = ac.signal; - const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; - const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; - const noDelete = allowStale || options.noDeleteOnFetchRejection; - const bf = p; - if (this.#valList[index] === p) { - // if we allow stale on fetch rejections, then we need to ensure that - // the stale value is not removed from the cache when the fetch fails. - const del = !noDelete || bf.__staleWhileFetching === undefined; - if (del) { - this.delete(k); - } - else if (!allowStaleAborted) { - // still replace the *promise* with the stale value, - // since we are done with the promise at this point. - // leave it untouched if we're still waiting for an - // aborted background fetch that hasn't yet returned. - this.#valList[index] = bf.__staleWhileFetching; - } - } - if (allowStale) { - if (options.status && bf.__staleWhileFetching !== undefined) { - options.status.returnedStale = true; - } - return bf.__staleWhileFetching; - } - else if (bf.__returned === bf) { - throw er; - } - }; - const pcall = (res, rej) => { - const fmp = this.#fetchMethod?.(k, v, fetchOpts); - if (fmp && fmp instanceof Promise) { - fmp.then(v => res(v === undefined ? undefined : v), rej); - } - // ignored, we go until we finish, regardless. - // defer check until we are actually aborting, - // so fetchMethod can override. - ac.signal.addEventListener('abort', () => { - if (!options.ignoreFetchAbort || - options.allowStaleOnFetchAbort) { - res(undefined); - // when it eventually resolves, update the cache. - if (options.allowStaleOnFetchAbort) { - res = v => cb(v, true); - } - } - }); - }; - if (options.status) - options.status.fetchDispatched = true; - const p = new Promise(pcall).then(cb, eb); - const bf = Object.assign(p, { - __abortController: ac, - __staleWhileFetching: v, - __returned: undefined, - }); - if (index === undefined) { - // internal, don't expose status. 
- this.set(k, bf, { ...fetchOpts.options, status: undefined }); - index = this.#keyMap.get(k); - } - else { - this.#valList[index] = bf; - } - return bf; - } - #isBackgroundFetch(p) { - if (!this.#hasFetchMethod) - return false; - const b = p; - return (!!b && - b instanceof Promise && - b.hasOwnProperty('__staleWhileFetching') && - b.__abortController instanceof AC); - } - async fetch(k, fetchOptions = {}) { - const { - // get options - allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, - // set options - ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, - // fetch exclusive options - noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; - if (!this.#hasFetchMethod) { - if (status) - status.fetch = 'get'; - return this.get(k, { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - status, - }); - } - const options = { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - ttl, - noDisposeOnSet, - size, - sizeCalculation, - noUpdateTTL, - noDeleteOnFetchRejection, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - status, - signal, - }; - let index = this.#keyMap.get(k); - if (index === undefined) { - if (status) - status.fetch = 'miss'; - const p = this.#backgroundFetch(k, index, options, context); - return (p.__returned = p); - } - else { - // in cache, maybe already fetching - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - const stale = allowStale && v.__staleWhileFetching !== undefined; - if (status) { - status.fetch = 'inflight'; - if (stale) - status.returnedStale = true; - } - return stale ? v.__staleWhileFetching : (v.__returned = v); - } - // if we force a refresh, that means do NOT serve the cached value, - // unless we are already in the process of refreshing the cache. - const isStale = this.#isStale(index); - if (!forceRefresh && !isStale) { - if (status) - status.fetch = 'hit'; - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - if (status) - this.#statusTTL(status, index); - return v; - } - // ok, it is stale or a forced refresh, and not already fetching. - // refresh the cache. - const p = this.#backgroundFetch(k, index, options, context); - const hasStale = p.__staleWhileFetching !== undefined; - const staleVal = hasStale && allowStale; - if (status) { - status.fetch = isStale ? 'stale' : 'refresh'; - if (staleVal && isStale) - status.returnedStale = true; - } - return staleVal ? p.__staleWhileFetching : (p.__returned = p); - } - } - /** - * Return a value from the cache. Will update the recency of the cache - * entry found. - * - * If the key is not found, get() will return `undefined`. 
- */ - get(k, getOptions = {}) { - const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const value = this.#valList[index]; - const fetching = this.#isBackgroundFetch(value); - if (status) - this.#statusTTL(status, index); - if (this.#isStale(index)) { - if (status) - status.get = 'stale'; - // delete only if not an in-flight background fetch - if (!fetching) { - if (!noDeleteOnStaleGet) { - this.delete(k); - } - if (status && allowStale) - status.returnedStale = true; - return allowStale ? value : undefined; - } - else { - if (status && - allowStale && - value.__staleWhileFetching !== undefined) { - status.returnedStale = true; - } - return allowStale ? value.__staleWhileFetching : undefined; - } - } - else { - if (status) - status.get = 'hit'; - // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching. - // If it's not stale, and fetching, AND has a __staleWhileFetching - // value, then that means the user fetched with {forceRefresh:true}, - // so it's safe to return that value. - if (fetching) { - return value.__staleWhileFetching; - } - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - return value; - } - } - else if (status) { - status.get = 'miss'; - } - } - #connect(p, n) { - this.#prev[n] = p; - this.#next[p] = n; - } - #moveToTail(index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.#tail) { - if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#connect(this.#prev[index], this.#next[index]); - } - this.#connect(this.#tail, index); - this.#tail = index; - } - } - /** - * Deletes a key out of the cache. - * Returns true if the key was deleted, false otherwise. - */ - delete(k) { - let deleted = false; - if (this.#size !== 0) { - const index = this.#keyMap.get(k); - if (index !== undefined) { - deleted = true; - if (this.#size === 1) { - this.clear(); - } - else { - this.#removeItemSize(index); - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'delete'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'delete']); - } - } - this.#keyMap.delete(k); - this.#keyList[index] = undefined; - this.#valList[index] = undefined; - if (index === this.#tail) { - this.#tail = this.#prev[index]; - } - else if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#next[this.#prev[index]] = this.#next[index]; - this.#prev[this.#next[index]] = this.#prev[index]; - } - this.#size--; - this.#free.push(index); - } - } - } - if (this.#hasDisposeAfter && this.#disposed?.length) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return deleted; - } - /** - * Clear the cache entirely, throwing away all values. 
- */ - clear() { - for (const index of this.#rindexes({ allowStale: true })) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else { - const k = this.#keyList[index]; - if (this.#hasDispose) { - this.#dispose?.(v, k, 'delete'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'delete']); - } - } - } - this.#keyMap.clear(); - this.#valList.fill(undefined); - this.#keyList.fill(undefined); - if (this.#ttls && this.#starts) { - this.#ttls.fill(0); - this.#starts.fill(0); - } - if (this.#sizes) { - this.#sizes.fill(0); - } - this.#head = 0; - this.#tail = 0; - this.#free.length = 0; - this.#calculatedSize = 0; - this.#size = 0; - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.min.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.min.js deleted file mode 100644 index 5a16b3940d6df..0000000000000 --- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/index.min.js +++ /dev/null @@ -1,2 +0,0 @@ -var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var R=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=C.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,R))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#W=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#R(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#W=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#R(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#W(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#W(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#R(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#R(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};export{R as LRUCache}; -//# sourceMappingURL=index.min.js.map diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/package.json b/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/package.json deleted file mode 100644 index 3dbc1ca591c05..0000000000000 --- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/mjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "module" -} diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/LICENSE b/node_modules/init-package-json/node_modules/lru-cache/LICENSE similarity index 100% rename from node_modules/@npmcli/git/node_modules/lru-cache/LICENSE rename to node_modules/init-package-json/node_modules/lru-cache/LICENSE diff --git a/node_modules/init-package-json/node_modules/lru-cache/index.js b/node_modules/init-package-json/node_modules/lru-cache/index.js new file mode 100644 index 0000000000000..48e99fe5e5a70 --- /dev/null +++ b/node_modules/init-package-json/node_modules/lru-cache/index.js @@ -0,0 +1,1227 @@ +const perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date + +const hasAbortController = typeof AbortController === 'function' + +// minimal backwards-compatibility polyfill +// this doesn't have nearly all the checks and whatnot that +// actual AbortController/Signal has, but it's enough for +// our purposes, and if used properly, behaves the same. +const AC = hasAbortController + ? AbortController + : class AbortController { + constructor() { + this.signal = new AS() + } + abort(reason = new Error('This operation was aborted')) { + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true + this.signal.dispatchEvent({ + type: 'abort', + target: this.signal, + }) + } + } + +const hasAbortSignal = typeof AbortSignal === 'function' +// Some polyfills put this on the AC class, not global +const hasACAbortSignal = typeof AC.AbortSignal === 'function' +const AS = hasAbortSignal + ? AbortSignal + : hasACAbortSignal + ? 
AC.AbortSignal + : class AbortSignal { + constructor() { + this.reason = undefined + this.aborted = false + this._listeners = [] + } + dispatchEvent(e) { + if (e.type === 'abort') { + this.aborted = true + this.onabort(e) + this._listeners.forEach(f => f(e), this) + } + } + onabort() {} + addEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners.push(fn) + } + } + removeEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners = this._listeners.filter(f => f !== fn) + } + } + } + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} + +const emitWarning = (...a) => { + typeof process === 'object' && + process && + typeof process.emitWarning === 'function' + ? process.emitWarning(...a) + : console.error(...a) +} + +const shouldWarn = code => !warned.has(code) + +const warn = (code, what, instead, fn) => { + warned.add(code) + const msg = `The ${what} is deprecated. Please use ${instead} instead.` + emitWarning(msg, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ?
{} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.max && !this.maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' 
+ emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + getRemainingTTL(key) { + return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0 + } + + initializeTTLTracking() { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + + this.setItemTTL = (index, ttl, start = perf.now()) => { + this.starts[index] = ttl !== 0 ? start : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + + this.updateItemAge = index => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout( + () => (cachedNow = 0), + this.ttlResolution + ) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + + this.getRemainingTTL = key => { + const index = this.keyMap.get(key) + if (index === undefined) { + return 0 + } + return this.ttls[index] === 0 || this.starts[index] === 0 + ? Infinity + : this.starts[index] + + this.ttls[index] - + (cachedNow || getNow()) + } + + this.isStale = index => { + return ( + this.ttls[index] !== 0 && + this.starts[index] !== 0 && + (cachedNow || getNow()) - this.starts[index] > + this.ttls[index] + ) + } + } + updateItemAge(_index) {} + statusTTL(_status, _index) {} + setItemTTL(_index, _ttl, _start) {} + isStale(_index) { + return false + } + + initializeSizeTracking() { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => { + this.calculatedSize -= this.sizes[index] + this.sizes[index] = 0 + } + this.requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + + 'must be set.' 
+ ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? 
oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + + get reset() { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length() { + deprecatedProperty('length', 'size') + return this.size + } + + static get AbortController() { + return AC + } + static get AbortSignal() { + return AS + } +} + +module.exports = LRUCache diff --git a/node_modules/init-package-json/node_modules/lru-cache/index.mjs b/node_modules/init-package-json/node_modules/lru-cache/index.mjs new file mode 100644 index 0000000000000..4a0b4813ec515 --- /dev/null +++ b/node_modules/init-package-json/node_modules/lru-cache/index.mjs @@ -0,0 +1,1227 @@ +const perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date + +const hasAbortController = typeof AbortController === 'function' + +// minimal backwards-compatibility polyfill +// this doesn't have nearly all the checks and whatnot that +// actual AbortController/Signal has, but it's enough for +// our purposes, and if used properly, behaves the same. +const AC = hasAbortController + ? AbortController + : class AbortController { + constructor() { + this.signal = new AS() + } + abort(reason = new Error('This operation was aborted')) { + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true + this.signal.dispatchEvent({ + type: 'abort', + target: this.signal, + }) + } + } + +const hasAbortSignal = typeof AbortSignal === 'function' +// Some polyfills put this on the AC class, not global +const hasACAbortSignal = typeof AC.AbortSignal === 'function' +const AS = hasAbortSignal + ? AbortSignal + : hasACAbortSignal + ? AC.AbortController + : class AbortSignal { + constructor() { + this.reason = undefined + this.aborted = false + this._listeners = [] + } + dispatchEvent(e) { + if (e.type === 'abort') { + this.aborted = true + this.onabort(e) + this._listeners.forEach(f => f(e), this) + } + } + onabort() {} + addEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners.push(fn) + } + } + removeEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners = this._listeners.filter(f => f !== fn) + } + } + } + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} + +const emitWarning = (...a) => { + typeof process === 'object' && + process && + typeof process.emitWarning === 'function' + ? process.emitWarning(...a) + : console.error(...a) +} + +const shouldWarn = code => !warned.has(code) + +const warn = (code, what, instead, fn) => { + warned.add(code) + const msg = `The ${what} is deprecated. 
Please use ${instead} instead.` + emitWarning(msg, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ? {} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? 
getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.max && !this.maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + getRemainingTTL(key) { + return this.has(key, { updateAgeOnHas: false }) ? 
Infinity : 0 + } + + initializeTTLTracking() { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + + this.setItemTTL = (index, ttl, start = perf.now()) => { + this.starts[index] = ttl !== 0 ? start : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + + this.updateItemAge = index => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout( + () => (cachedNow = 0), + this.ttlResolution + ) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + + this.getRemainingTTL = key => { + const index = this.keyMap.get(key) + if (index === undefined) { + return 0 + } + return this.ttls[index] === 0 || this.starts[index] === 0 + ? Infinity + : this.starts[index] + + this.ttls[index] - + (cachedNow || getNow()) + } + + this.isStale = index => { + return ( + this.ttls[index] !== 0 && + this.starts[index] !== 0 && + (cachedNow || getNow()) - this.starts[index] > + this.ttls[index] + ) + } + } + updateItemAge(_index) {} + statusTTL(_status, _index) {} + setItemTTL(_index, _ttl, _start) {} + isStale(_index) { + return false + } + + initializeSizeTracking() { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => { + this.calculatedSize -= this.sizes[index] + this.sizes[index] = 0 + } + this.requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + + 'must be set.' 
+ ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? 
oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + + get reset() { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length() { + deprecatedProperty('length', 'size') + return this.size + } + + static get AbortController() { + return AC + } + static get AbortSignal() { + return AS + } +} + +export default LRUCache diff --git a/node_modules/cacache/node_modules/lru-cache/package.json b/node_modules/init-package-json/node_modules/lru-cache/package.json similarity index 52% rename from node_modules/cacache/node_modules/lru-cache/package.json rename to node_modules/init-package-json/node_modules/lru-cache/package.json index bae4a04839d1f..9684991727e7a 100644 --- a/node_modules/cacache/node_modules/lru-cache/package.json +++ b/node_modules/init-package-json/node_modules/lru-cache/package.json @@ -1,7 +1,7 @@ { "name": "lru-cache", "description": "A cache object that deletes the least-recently-used items.", - "version": "10.0.1", + "version": "7.18.3", "author": "Isaac Z. Schlueter ", "keywords": [ "mru", @@ -11,74 +11,60 @@ "sideEffects": false, "scripts": { "build": "npm run prepare", - "preprepare": "rm -rf dist", - "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", - "postprepare": "bash fixup.sh", "pretest": "npm run prepare", "presnap": "npm run prepare", - "test": "c8 tap", - "snap": "c8 tap", + "prepare": "node ./scripts/transpile-to-esm.js", + "size": "size-limit", + "test": "tap", + "snap": "tap", "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", "format": "prettier --write .", - "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", - "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh", - "prebenchmark": "npm run prepare", - "benchmark": "make -C benchmark", - "preprofile": "npm run prepare", - "profile": "make -C benchmark profile" + "typedoc": "typedoc ./index.d.ts" }, - "main": "./dist/cjs/index.js", - "module": "./dist/mjs/index.js", + "type": "commonjs", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", "exports": { - "./min": { - "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.min.js" - }, - "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.min.js" - } - }, ".": { "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.js" + "types": "./index.d.ts", + "default": "./index.mjs" }, "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.js" + "types": "./index.d.ts", + "default": "./index.js" } - } + }, + "./package.json": "./package.json" }, "repository": "git://github.com/isaacs/node-lru-cache.git", "devDependencies": { "@size-limit/preset-small-lib": "^7.0.8", - "@types/node": "^20.2.5", + "@types/node": "^17.0.31", "@types/tap": "^15.0.6", "benchmark": "^2.1.4", "c8": "^7.11.2", "clock-mock": "^1.0.6", - "esbuild": "^0.17.11", "eslint-config-prettier": "^8.5.0", - "marked": "^4.2.12", - "mkdirp": "^2.1.5", "prettier": "^2.6.2", "size-limit": "^7.0.8", "tap": "^16.3.4", - "ts-node": "^10.9.1", + "ts-node": "^10.7.0", "tslib": "^2.4.0", - "typedoc": "^0.24.6", - "typescript": "^5.0.4" + "typedoc": "^0.23.24", + "typescript": "^4.6.4" }, "license": "ISC", "files": [ - "dist" + "index.js", + "index.mjs", + "index.d.ts" ], "engines": { - "node": "14 || 
>=16.14" + "node": ">=12" }, "prettier": { "semi": false, @@ -92,17 +78,19 @@ "endOfLine": "lf" }, "tap": { - "coverage": false, + "nyc-arg": [ + "--include=index.js" + ], "node-arg": [ "--expose-gc", - "-r", + "--require", "ts-node/register" ], "ts": false }, "size-limit": [ { - "path": "./dist/mjs/index.js" + "path": "./index.js" } ] } diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/index.js b/node_modules/lru-cache/dist/cjs/index.js similarity index 100% rename from node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/index.js rename to node_modules/lru-cache/dist/cjs/index.js diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/index.min.js b/node_modules/lru-cache/dist/cjs/index.min.js similarity index 100% rename from node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/index.min.js rename to node_modules/lru-cache/dist/cjs/index.min.js diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/package.json b/node_modules/lru-cache/dist/cjs/package.json similarity index 100% rename from node_modules/@npmcli/agent/node_modules/lru-cache/dist/cjs/package.json rename to node_modules/lru-cache/dist/cjs/package.json diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/index.js b/node_modules/lru-cache/dist/mjs/index.js similarity index 100% rename from node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/index.js rename to node_modules/lru-cache/dist/mjs/index.js diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/index.min.js b/node_modules/lru-cache/dist/mjs/index.min.js similarity index 100% rename from node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/index.min.js rename to node_modules/lru-cache/dist/mjs/index.min.js diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/package.json b/node_modules/lru-cache/dist/mjs/package.json similarity index 100% rename from node_modules/@npmcli/agent/node_modules/lru-cache/dist/mjs/package.json rename to node_modules/lru-cache/dist/mjs/package.json diff --git a/node_modules/lru-cache/package.json b/node_modules/lru-cache/package.json index 9684991727e7a..bae4a04839d1f 100644 --- a/node_modules/lru-cache/package.json +++ b/node_modules/lru-cache/package.json @@ -1,7 +1,7 @@ { "name": "lru-cache", "description": "A cache object that deletes the least-recently-used items.", - "version": "7.18.3", + "version": "10.0.1", "author": "Isaac Z. 
Schlueter ", "keywords": [ "mru", @@ -11,60 +11,74 @@ "sideEffects": false, "scripts": { "build": "npm run prepare", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", + "postprepare": "bash fixup.sh", "pretest": "npm run prepare", "presnap": "npm run prepare", - "prepare": "node ./scripts/transpile-to-esm.js", - "size": "size-limit", - "test": "tap", - "snap": "tap", + "test": "c8 tap", + "snap": "c8 tap", "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", "format": "prettier --write .", - "typedoc": "typedoc ./index.d.ts" + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", + "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh", + "prebenchmark": "npm run prepare", + "benchmark": "make -C benchmark", + "preprofile": "npm run prepare", + "profile": "make -C benchmark profile" }, - "type": "commonjs", - "main": "./index.js", - "module": "./index.mjs", - "types": "./index.d.ts", + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", "exports": { - ".": { + "./min": { "import": { - "types": "./index.d.ts", - "default": "./index.mjs" + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.min.js" }, "require": { - "types": "./index.d.ts", - "default": "./index.js" + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.min.js" } }, - "./package.json": "./package.json" + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + } }, "repository": "git://github.com/isaacs/node-lru-cache.git", "devDependencies": { "@size-limit/preset-small-lib": "^7.0.8", - "@types/node": "^17.0.31", + "@types/node": "^20.2.5", "@types/tap": "^15.0.6", "benchmark": "^2.1.4", "c8": "^7.11.2", "clock-mock": "^1.0.6", + "esbuild": "^0.17.11", "eslint-config-prettier": "^8.5.0", + "marked": "^4.2.12", + "mkdirp": "^2.1.5", "prettier": "^2.6.2", "size-limit": "^7.0.8", "tap": "^16.3.4", - "ts-node": "^10.7.0", + "ts-node": "^10.9.1", "tslib": "^2.4.0", - "typedoc": "^0.23.24", - "typescript": "^4.6.4" + "typedoc": "^0.24.6", + "typescript": "^5.0.4" }, "license": "ISC", "files": [ - "index.js", - "index.mjs", - "index.d.ts" + "dist" ], "engines": { - "node": ">=12" + "node": "14 || >=16.14" }, "prettier": { "semi": false, @@ -78,19 +92,17 @@ "endOfLine": "lf" }, "tap": { - "nyc-arg": [ - "--include=index.js" - ], + "coverage": false, "node-arg": [ "--expose-gc", - "--require", + "-r", "ts-node/register" ], "ts": false }, "size-limit": [ { - "path": "./index.js" + "path": "./dist/mjs/index.js" } ] } diff --git a/node_modules/cacache/node_modules/lru-cache/LICENSE b/node_modules/node-gyp/node_modules/lru-cache/LICENSE similarity index 100% rename from node_modules/cacache/node_modules/lru-cache/LICENSE rename to node_modules/node-gyp/node_modules/lru-cache/LICENSE diff --git a/node_modules/node-gyp/node_modules/lru-cache/index.js b/node_modules/node-gyp/node_modules/lru-cache/index.js new file mode 100644 index 0000000000000..48e99fe5e5a70 --- /dev/null +++ b/node_modules/node-gyp/node_modules/lru-cache/index.js @@ -0,0 +1,1227 @@ +const perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? 
performance + : Date + +const hasAbortController = typeof AbortController === 'function' + +// minimal backwards-compatibility polyfill +// this doesn't have nearly all the checks and whatnot that +// actual AbortController/Signal has, but it's enough for +// our purposes, and if used properly, behaves the same. +const AC = hasAbortController + ? AbortController + : class AbortController { + constructor() { + this.signal = new AS() + } + abort(reason = new Error('This operation was aborted')) { + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true + this.signal.dispatchEvent({ + type: 'abort', + target: this.signal, + }) + } + } + +const hasAbortSignal = typeof AbortSignal === 'function' +// Some polyfills put this on the AC class, not global +const hasACAbortSignal = typeof AC.AbortSignal === 'function' +const AS = hasAbortSignal + ? AbortSignal + : hasACAbortSignal + ? AC.AbortController + : class AbortSignal { + constructor() { + this.reason = undefined + this.aborted = false + this._listeners = [] + } + dispatchEvent(e) { + if (e.type === 'abort') { + this.aborted = true + this.onabort(e) + this._listeners.forEach(f => f(e), this) + } + } + onabort() {} + addEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners.push(fn) + } + } + removeEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners = this._listeners.filter(f => f !== fn) + } + } + } + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} + +const emitWarning = (...a) => { + typeof process === 'object' && + process && + typeof process.emitWarning === 'function' + ? process.emitWarning(...a) + : console.error(...a) +} + +const shouldWarn = code => !warned.has(code) + +const warn = (code, what, instead, fn) => { + warned.add(code) + const msg = `The ${what} is deprecated. Please use ${instead} instead.` + emitWarning(msg, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? 
ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ? {} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? 
ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.max && !this.maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + getRemainingTTL(key) { + return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0 + } + + initializeTTLTracking() { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + + this.setItemTTL = (index, ttl, start = perf.now()) => { + this.starts[index] = ttl !== 0 ? start : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + + this.updateItemAge = index => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout( + () => (cachedNow = 0), + this.ttlResolution + ) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + + this.getRemainingTTL = key => { + const index = this.keyMap.get(key) + if (index === undefined) { + return 0 + } + return this.ttls[index] === 0 || this.starts[index] === 0 + ? Infinity + : this.starts[index] + + this.ttls[index] - + (cachedNow || getNow()) + } + + this.isStale = index => { + return ( + this.ttls[index] !== 0 && + this.starts[index] !== 0 && + (cachedNow || getNow()) - this.starts[index] > + this.ttls[index] + ) + } + } + updateItemAge(_index) {} + statusTTL(_status, _index) {} + setItemTTL(_index, _ttl, _start) {} + isStale(_index) { + return false + } + + initializeSizeTracking() { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => { + this.calculatedSize -= this.sizes[index] + this.sizes[index] = 0 + } + this.requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. 
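
For context, the size-tracking path set up here (sizeCalculation, maxSize, and maxEntrySize defaulting to maxSize) is exercised roughly as follows. This is a minimal sketch against the vendored v7 API, with illustrative keys and values; `require('lru-cache')` is assumed to resolve to this nested copy.

const LRUCache = require('lru-cache')

const cache = new LRUCache({
  maxSize: 1000,                                   // budget for the sum of all entry sizes
  sizeCalculation: (value, key) => value.length,   // must return a positive integer
})

cache.set('greeting', 'hello world')   // size 11, fits
cache.set('blob', 'x'.repeat(5000))    // exceeds maxEntrySize (defaults to maxSize), so it is never stored
console.log(cache.has('blob'))         // false
console.log(cache.calculatedSize)      // 11
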
+ if (this.isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + + 'must be set.' + ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? 
oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
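
The backgroundFetch() machinery above backs the public fetch() API; a rough sketch of how a caller wires it up is below. The `loadUser` helper is illustrative only, not part of this package.

const LRUCache = require('lru-cache')

// illustrative async loader; anything that honours `signal` behaves the same way
const loadUser = async (id, { signal }) => ({ id, loadedAt: Date.now() })

const users = new LRUCache({
  max: 100,
  ttl: 60000,          // entries go stale after a minute
  allowStale: true,    // fetch() may return the stale value while a refresh is in flight
  // `signal` is the AbortSignal created in backgroundFetch(); it fires if the
  // entry is evicted, deleted, or replaced before the fetch settles.
  fetchMethod: (key, staleValue, { signal }) => loadUser(key, { signal }),
})

users.fetch('alice').then(u => console.log(u.id))  // miss: dispatches fetchMethod; hit: served from cache
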
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
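
The delete() and clear() paths above both route removed values through dispose()/disposeAfter(), just as evict() does. A small sketch of what that looks like from the caller's side, with illustrative keys and values:

const LRUCache = require('lru-cache')

const cache = new LRUCache({
  max: 2,
  dispose: (value, key, reason) => {
    // reason is 'evict', 'set', or 'delete' (clear() also reports 'delete')
    console.log(`disposed ${key}: ${reason}`)
  },
})

cache.set('a', 1)
cache.set('b', 2)
cache.set('c', 3)   // least-recently-used entry 'a' is evicted -> dispose(1, 'a', 'evict')
cache.delete('b')   // dispose(2, 'b', 'delete')
cache.clear()       // remaining entry 'c' disposed with reason 'delete'
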
this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + + get reset() { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length() { + deprecatedProperty('length', 'size') + return this.size + } + + static get AbortController() { + return AC + } + static get AbortSignal() { + return AS + } +} + +module.exports = LRUCache diff --git a/node_modules/node-gyp/node_modules/lru-cache/index.mjs b/node_modules/node-gyp/node_modules/lru-cache/index.mjs new file mode 100644 index 0000000000000..4a0b4813ec515 --- /dev/null +++ b/node_modules/node-gyp/node_modules/lru-cache/index.mjs @@ -0,0 +1,1227 @@ +const perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date + +const hasAbortController = typeof AbortController === 'function' + +// minimal backwards-compatibility polyfill +// this doesn't have nearly all the checks and whatnot that +// actual AbortController/Signal has, but it's enough for +// our purposes, and if used properly, behaves the same. +const AC = hasAbortController + ? AbortController + : class AbortController { + constructor() { + this.signal = new AS() + } + abort(reason = new Error('This operation was aborted')) { + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true + this.signal.dispatchEvent({ + type: 'abort', + target: this.signal, + }) + } + } + +const hasAbortSignal = typeof AbortSignal === 'function' +// Some polyfills put this on the AC class, not global +const hasACAbortSignal = typeof AC.AbortSignal === 'function' +const AS = hasAbortSignal + ? AbortSignal + : hasACAbortSignal + ? AC.AbortController + : class AbortSignal { + constructor() { + this.reason = undefined + this.aborted = false + this._listeners = [] + } + dispatchEvent(e) { + if (e.type === 'abort') { + this.aborted = true + this.onabort(e) + this._listeners.forEach(f => f(e), this) + } + } + onabort() {} + addEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners.push(fn) + } + } + removeEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners = this._listeners.filter(f => f !== fn) + } + } + } + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} + +const emitWarning = (...a) => { + typeof process === 'object' && + process && + typeof process.emitWarning === 'function' + ? process.emitWarning(...a) + : console.error(...a) +} + +const shouldWarn = code => !warned.has(code) + +const warn = (code, what, instead, fn) => { + warned.add(code) + const msg = `The ${what} is deprecated. 
Please use ${instead} instead.` + emitWarning(msg, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ? {} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? 
getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.max && !this.maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + getRemainingTTL(key) { + return this.has(key, { updateAgeOnHas: false }) ? 
Infinity : 0 + } + + initializeTTLTracking() { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + + this.setItemTTL = (index, ttl, start = perf.now()) => { + this.starts[index] = ttl !== 0 ? start : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + + this.updateItemAge = index => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout( + () => (cachedNow = 0), + this.ttlResolution + ) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + + this.getRemainingTTL = key => { + const index = this.keyMap.get(key) + if (index === undefined) { + return 0 + } + return this.ttls[index] === 0 || this.starts[index] === 0 + ? Infinity + : this.starts[index] + + this.ttls[index] - + (cachedNow || getNow()) + } + + this.isStale = index => { + return ( + this.ttls[index] !== 0 && + this.starts[index] !== 0 && + (cachedNow || getNow()) - this.starts[index] > + this.ttls[index] + ) + } + } + updateItemAge(_index) {} + statusTTL(_status, _index) {} + setItemTTL(_index, _ttl, _start) {} + isStale(_index) { + return false + } + + initializeSizeTracking() { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => { + this.calculatedSize -= this.sizes[index] + this.sizes[index] = 0 + } + this.requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + + 'must be set.' 
+ ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? 
oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
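
As in the CommonJS build above, backgroundFetch() here chains an external options.signal into its own AbortController. A sketch of cancelling an in-flight fetch() from the caller's side; the key name and timeout are illustrative, and the import is assumed to resolve to this ESM copy.

import LRUCache from 'lru-cache'

const cache = new LRUCache({
  max: 10,
  fetchMethod: (key, staleValue, { signal }) =>
    new Promise((resolve, reject) => {
      const timer = setTimeout(() => resolve(`value for ${key}`), 5000)
      signal.addEventListener('abort', () => {
        clearTimeout(timer)
        reject(signal.reason)
      })
    }),
})

const ac = new AbortController()
const pending = cache.fetch('slow-key', { signal: ac.signal })
ac.abort(new Error('caller gave up'))            // forwarded to the fetchMethod's signal above
pending.catch(err => console.log(err.message))   // 'caller gave up'; the failed entry is deleted
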
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
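
The reset/length getters just below, together with the maxAge, stale, and length options handled in the constructor, keep the pre-v7 API working behind one-time deprecation warnings. A small sketch of what still works (option values are illustrative):

import LRUCache from 'lru-cache'

const cache = new LRUCache({ max: 5, maxAge: 1000 })  // pre-v7 name for `ttl`
// warns once per process (code LRU_CACHE_OPTION_maxAge):
//   "The maxAge option is deprecated. Please use options.ttl instead."

cache.set('k', 'v')
cache.reset()               // deprecated getter, forwards to clear()
console.log(cache.length)   // deprecated alias for cache.size -> 0
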
this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + + get reset() { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length() { + deprecatedProperty('length', 'size') + return this.size + } + + static get AbortController() { + return AC + } + static get AbortSignal() { + return AS + } +} + +export default LRUCache diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/package.json b/node_modules/node-gyp/node_modules/lru-cache/package.json similarity index 52% rename from node_modules/@npmcli/git/node_modules/lru-cache/package.json rename to node_modules/node-gyp/node_modules/lru-cache/package.json index bae4a04839d1f..9684991727e7a 100644 --- a/node_modules/@npmcli/git/node_modules/lru-cache/package.json +++ b/node_modules/node-gyp/node_modules/lru-cache/package.json @@ -1,7 +1,7 @@ { "name": "lru-cache", "description": "A cache object that deletes the least-recently-used items.", - "version": "10.0.1", + "version": "7.18.3", "author": "Isaac Z. Schlueter ", "keywords": [ "mru", @@ -11,74 +11,60 @@ "sideEffects": false, "scripts": { "build": "npm run prepare", - "preprepare": "rm -rf dist", - "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", - "postprepare": "bash fixup.sh", "pretest": "npm run prepare", "presnap": "npm run prepare", - "test": "c8 tap", - "snap": "c8 tap", + "prepare": "node ./scripts/transpile-to-esm.js", + "size": "size-limit", + "test": "tap", + "snap": "tap", "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", "format": "prettier --write .", - "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", - "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh", - "prebenchmark": "npm run prepare", - "benchmark": "make -C benchmark", - "preprofile": "npm run prepare", - "profile": "make -C benchmark profile" + "typedoc": "typedoc ./index.d.ts" }, - "main": "./dist/cjs/index.js", - "module": "./dist/mjs/index.js", + "type": "commonjs", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", "exports": { - "./min": { - "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.min.js" - }, - "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.min.js" - } - }, ".": { "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.js" + "types": "./index.d.ts", + "default": "./index.mjs" }, "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.js" + "types": "./index.d.ts", + "default": "./index.js" } - } + }, + "./package.json": "./package.json" }, "repository": "git://github.com/isaacs/node-lru-cache.git", "devDependencies": { "@size-limit/preset-small-lib": "^7.0.8", - "@types/node": "^20.2.5", + "@types/node": "^17.0.31", "@types/tap": "^15.0.6", "benchmark": "^2.1.4", "c8": "^7.11.2", "clock-mock": "^1.0.6", - "esbuild": "^0.17.11", "eslint-config-prettier": "^8.5.0", - "marked": "^4.2.12", - "mkdirp": "^2.1.5", "prettier": "^2.6.2", "size-limit": "^7.0.8", "tap": "^16.3.4", - "ts-node": "^10.9.1", + "ts-node": "^10.7.0", "tslib": "^2.4.0", - "typedoc": "^0.24.6", - "typescript": "^5.0.4" + "typedoc": "^0.23.24", + "typescript": "^4.6.4" }, "license": "ISC", "files": [ - "dist" + "index.js", + "index.mjs", + "index.d.ts" ], "engines": { - "node": "14 || >=16.14" + 
"node": ">=12" }, "prettier": { "semi": false, @@ -92,17 +78,19 @@ "endOfLine": "lf" }, "tap": { - "coverage": false, + "nyc-arg": [ + "--include=index.js" + ], "node-arg": [ "--expose-gc", - "-r", + "--require", "ts-node/register" ], "ts": false }, "size-limit": [ { - "path": "./dist/mjs/index.js" + "path": "./index.js" } ] } diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/LICENSE b/node_modules/normalize-package-data/node_modules/lru-cache/LICENSE similarity index 100% rename from node_modules/hosted-git-info/node_modules/lru-cache/LICENSE rename to node_modules/normalize-package-data/node_modules/lru-cache/LICENSE diff --git a/node_modules/normalize-package-data/node_modules/lru-cache/index.js b/node_modules/normalize-package-data/node_modules/lru-cache/index.js new file mode 100644 index 0000000000000..48e99fe5e5a70 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/lru-cache/index.js @@ -0,0 +1,1227 @@ +const perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date + +const hasAbortController = typeof AbortController === 'function' + +// minimal backwards-compatibility polyfill +// this doesn't have nearly all the checks and whatnot that +// actual AbortController/Signal has, but it's enough for +// our purposes, and if used properly, behaves the same. +const AC = hasAbortController + ? AbortController + : class AbortController { + constructor() { + this.signal = new AS() + } + abort(reason = new Error('This operation was aborted')) { + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true + this.signal.dispatchEvent({ + type: 'abort', + target: this.signal, + }) + } + } + +const hasAbortSignal = typeof AbortSignal === 'function' +// Some polyfills put this on the AC class, not global +const hasACAbortSignal = typeof AC.AbortSignal === 'function' +const AS = hasAbortSignal + ? AbortSignal + : hasACAbortSignal + ? AC.AbortController + : class AbortSignal { + constructor() { + this.reason = undefined + this.aborted = false + this._listeners = [] + } + dispatchEvent(e) { + if (e.type === 'abort') { + this.aborted = true + this.onabort(e) + this._listeners.forEach(f => f(e), this) + } + } + onabort() {} + addEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners.push(fn) + } + } + removeEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners = this._listeners.filter(f => f !== fn) + } + } + } + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} + +const emitWarning = (...a) => { + typeof process === 'object' && + process && + typeof process.emitWarning === 'function' + ? 
process.emitWarning(...a) + : console.error(...a) +} + +const shouldWarn = code => !warned.has(code) + +const warn = (code, what, instead, fn) => { + warned.add(code) + const msg = `The ${what} is deprecated. Please use ${instead} instead.` + emitWarning(msg, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ? {} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? 
getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.max && !this.maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + getRemainingTTL(key) { + return this.has(key, { updateAgeOnHas: false }) ? 
Infinity : 0 + } + + initializeTTLTracking() { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + + this.setItemTTL = (index, ttl, start = perf.now()) => { + this.starts[index] = ttl !== 0 ? start : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + + this.updateItemAge = index => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout( + () => (cachedNow = 0), + this.ttlResolution + ) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + + this.getRemainingTTL = key => { + const index = this.keyMap.get(key) + if (index === undefined) { + return 0 + } + return this.ttls[index] === 0 || this.starts[index] === 0 + ? Infinity + : this.starts[index] + + this.ttls[index] - + (cachedNow || getNow()) + } + + this.isStale = index => { + return ( + this.ttls[index] !== 0 && + this.starts[index] !== 0 && + (cachedNow || getNow()) - this.starts[index] > + this.ttls[index] + ) + } + } + updateItemAge(_index) {} + statusTTL(_status, _index) {} + setItemTTL(_index, _ttl, _start) {} + isStale(_index) { + return false + } + + initializeSizeTracking() { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => { + this.calculatedSize -= this.sizes[index] + this.sizes[index] = 0 + } + this.requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + + 'must be set.' 
+ ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? 
oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + + get reset() { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length() { + deprecatedProperty('length', 'size') + return this.size + } + + static get AbortController() { + return AC + } + static get AbortSignal() { + return AS + } +} + +module.exports = LRUCache diff --git a/node_modules/normalize-package-data/node_modules/lru-cache/index.mjs b/node_modules/normalize-package-data/node_modules/lru-cache/index.mjs new file mode 100644 index 0000000000000..4a0b4813ec515 --- /dev/null +++ b/node_modules/normalize-package-data/node_modules/lru-cache/index.mjs @@ -0,0 +1,1227 @@ +const perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date + +const hasAbortController = typeof AbortController === 'function' + +// minimal backwards-compatibility polyfill +// this doesn't have nearly all the checks and whatnot that +// actual AbortController/Signal has, but it's enough for +// our purposes, and if used properly, behaves the same. +const AC = hasAbortController + ? AbortController + : class AbortController { + constructor() { + this.signal = new AS() + } + abort(reason = new Error('This operation was aborted')) { + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true + this.signal.dispatchEvent({ + type: 'abort', + target: this.signal, + }) + } + } + +const hasAbortSignal = typeof AbortSignal === 'function' +// Some polyfills put this on the AC class, not global +const hasACAbortSignal = typeof AC.AbortSignal === 'function' +const AS = hasAbortSignal + ? AbortSignal + : hasACAbortSignal + ? AC.AbortController + : class AbortSignal { + constructor() { + this.reason = undefined + this.aborted = false + this._listeners = [] + } + dispatchEvent(e) { + if (e.type === 'abort') { + this.aborted = true + this.onabort(e) + this._listeners.forEach(f => f(e), this) + } + } + onabort() {} + addEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners.push(fn) + } + } + removeEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners = this._listeners.filter(f => f !== fn) + } + } + } + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} + +const emitWarning = (...a) => { + typeof process === 'object' && + process && + typeof process.emitWarning === 'function' + ? process.emitWarning(...a) + : console.error(...a) +} + +const shouldWarn = code => !warned.has(code) + +const warn = (code, what, instead, fn) => { + warned.add(code) + const msg = `The ${what} is deprecated. 
Please use ${instead} instead.` + emitWarning(msg, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ? {} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? 
getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.max && !this.maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + getRemainingTTL(key) { + return this.has(key, { updateAgeOnHas: false }) ? 
Infinity : 0 + } + + initializeTTLTracking() { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + + this.setItemTTL = (index, ttl, start = perf.now()) => { + this.starts[index] = ttl !== 0 ? start : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + + this.updateItemAge = index => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout( + () => (cachedNow = 0), + this.ttlResolution + ) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + + this.getRemainingTTL = key => { + const index = this.keyMap.get(key) + if (index === undefined) { + return 0 + } + return this.ttls[index] === 0 || this.starts[index] === 0 + ? Infinity + : this.starts[index] + + this.ttls[index] - + (cachedNow || getNow()) + } + + this.isStale = index => { + return ( + this.ttls[index] !== 0 && + this.starts[index] !== 0 && + (cachedNow || getNow()) - this.starts[index] > + this.ttls[index] + ) + } + } + updateItemAge(_index) {} + statusTTL(_status, _index) {} + setItemTTL(_index, _ttl, _start) {} + isStale(_index) { + return false + } + + initializeSizeTracking() { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => { + this.calculatedSize -= this.sizes[index] + this.sizes[index] = 0 + } + this.requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + + 'must be set.' 
+ ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? 
oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + + get reset() { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length() { + deprecatedProperty('length', 'size') + return this.size + } + + static get AbortController() { + return AC + } + static get AbortSignal() { + return AS + } +} + +export default LRUCache diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/package.json b/node_modules/normalize-package-data/node_modules/lru-cache/package.json similarity index 52% rename from node_modules/hosted-git-info/node_modules/lru-cache/package.json rename to node_modules/normalize-package-data/node_modules/lru-cache/package.json index bae4a04839d1f..9684991727e7a 100644 --- a/node_modules/hosted-git-info/node_modules/lru-cache/package.json +++ b/node_modules/normalize-package-data/node_modules/lru-cache/package.json @@ -1,7 +1,7 @@ { "name": "lru-cache", "description": "A cache object that deletes the least-recently-used items.", - "version": "10.0.1", + "version": "7.18.3", "author": "Isaac Z. Schlueter ", "keywords": [ "mru", @@ -11,74 +11,60 @@ "sideEffects": false, "scripts": { "build": "npm run prepare", - "preprepare": "rm -rf dist", - "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", - "postprepare": "bash fixup.sh", "pretest": "npm run prepare", "presnap": "npm run prepare", - "test": "c8 tap", - "snap": "c8 tap", + "prepare": "node ./scripts/transpile-to-esm.js", + "size": "size-limit", + "test": "tap", + "snap": "tap", "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", "format": "prettier --write .", - "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", - "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh", - "prebenchmark": "npm run prepare", - "benchmark": "make -C benchmark", - "preprofile": "npm run prepare", - "profile": "make -C benchmark profile" + "typedoc": "typedoc ./index.d.ts" }, - "main": "./dist/cjs/index.js", - "module": "./dist/mjs/index.js", + "type": "commonjs", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", "exports": { - "./min": { - "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.min.js" - }, - "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.min.js" - } - }, ".": { "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.js" + "types": "./index.d.ts", + "default": "./index.mjs" }, "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.js" + "types": "./index.d.ts", + "default": "./index.js" } - } + }, + "./package.json": "./package.json" }, "repository": "git://github.com/isaacs/node-lru-cache.git", "devDependencies": { "@size-limit/preset-small-lib": "^7.0.8", - "@types/node": "^20.2.5", + "@types/node": "^17.0.31", "@types/tap": "^15.0.6", "benchmark": "^2.1.4", "c8": "^7.11.2", "clock-mock": "^1.0.6", - "esbuild": "^0.17.11", "eslint-config-prettier": "^8.5.0", - "marked": "^4.2.12", - "mkdirp": "^2.1.5", "prettier": "^2.6.2", "size-limit": "^7.0.8", "tap": "^16.3.4", - "ts-node": "^10.9.1", + "ts-node": "^10.7.0", "tslib": "^2.4.0", - "typedoc": "^0.24.6", - "typescript": "^5.0.4" + "typedoc": "^0.23.24", + "typescript": "^4.6.4" }, "license": "ISC", "files": [ - "dist" + "index.js", + "index.mjs", + 
"index.d.ts" ], "engines": { - "node": "14 || >=16.14" + "node": ">=12" }, "prettier": { "semi": false, @@ -92,17 +78,19 @@ "endOfLine": "lf" }, "tap": { - "coverage": false, + "nyc-arg": [ + "--include=index.js" + ], "node-arg": [ "--expose-gc", - "-r", + "--require", "ts-node/register" ], "ts": false }, "size-limit": [ { - "path": "./dist/mjs/index.js" + "path": "./index.js" } ] } diff --git a/node_modules/path-scurry/node_modules/lru-cache/LICENSE b/node_modules/npm-pick-manifest/node_modules/lru-cache/LICENSE similarity index 100% rename from node_modules/path-scurry/node_modules/lru-cache/LICENSE rename to node_modules/npm-pick-manifest/node_modules/lru-cache/LICENSE diff --git a/node_modules/npm-pick-manifest/node_modules/lru-cache/index.js b/node_modules/npm-pick-manifest/node_modules/lru-cache/index.js new file mode 100644 index 0000000000000..48e99fe5e5a70 --- /dev/null +++ b/node_modules/npm-pick-manifest/node_modules/lru-cache/index.js @@ -0,0 +1,1227 @@ +const perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date + +const hasAbortController = typeof AbortController === 'function' + +// minimal backwards-compatibility polyfill +// this doesn't have nearly all the checks and whatnot that +// actual AbortController/Signal has, but it's enough for +// our purposes, and if used properly, behaves the same. +const AC = hasAbortController + ? AbortController + : class AbortController { + constructor() { + this.signal = new AS() + } + abort(reason = new Error('This operation was aborted')) { + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true + this.signal.dispatchEvent({ + type: 'abort', + target: this.signal, + }) + } + } + +const hasAbortSignal = typeof AbortSignal === 'function' +// Some polyfills put this on the AC class, not global +const hasACAbortSignal = typeof AC.AbortSignal === 'function' +const AS = hasAbortSignal + ? AbortSignal + : hasACAbortSignal + ? AC.AbortController + : class AbortSignal { + constructor() { + this.reason = undefined + this.aborted = false + this._listeners = [] + } + dispatchEvent(e) { + if (e.type === 'abort') { + this.aborted = true + this.onabort(e) + this._listeners.forEach(f => f(e), this) + } + } + onabort() {} + addEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners.push(fn) + } + } + removeEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners = this._listeners.filter(f => f !== fn) + } + } + } + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} + +const emitWarning = (...a) => { + typeof process === 'object' && + process && + typeof process.emitWarning === 'function' + ? 
process.emitWarning(...a) + : console.error(...a) +} + +const shouldWarn = code => !warned.has(code) + +const warn = (code, what, instead, fn) => { + warned.add(code) + const msg = `The ${what} is deprecated. Please use ${instead} instead.` + emitWarning(msg, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ? {} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? 
getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.max && !this.maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + getRemainingTTL(key) { + return this.has(key, { updateAgeOnHas: false }) ? 
Infinity : 0 + } + + initializeTTLTracking() { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + + this.setItemTTL = (index, ttl, start = perf.now()) => { + this.starts[index] = ttl !== 0 ? start : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + + this.updateItemAge = index => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout( + () => (cachedNow = 0), + this.ttlResolution + ) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + + this.getRemainingTTL = key => { + const index = this.keyMap.get(key) + if (index === undefined) { + return 0 + } + return this.ttls[index] === 0 || this.starts[index] === 0 + ? Infinity + : this.starts[index] + + this.ttls[index] - + (cachedNow || getNow()) + } + + this.isStale = index => { + return ( + this.ttls[index] !== 0 && + this.starts[index] !== 0 && + (cachedNow || getNow()) - this.starts[index] > + this.ttls[index] + ) + } + } + updateItemAge(_index) {} + statusTTL(_status, _index) {} + setItemTTL(_index, _ttl, _start) {} + isStale(_index) { + return false + } + + initializeSizeTracking() { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => { + this.calculatedSize -= this.sizes[index] + this.sizes[index] = 0 + } + this.requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + + 'must be set.' 
+ ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? 
oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + + get reset() { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length() { + deprecatedProperty('length', 'size') + return this.size + } + + static get AbortController() { + return AC + } + static get AbortSignal() { + return AS + } +} + +module.exports = LRUCache diff --git a/node_modules/npm-pick-manifest/node_modules/lru-cache/index.mjs b/node_modules/npm-pick-manifest/node_modules/lru-cache/index.mjs new file mode 100644 index 0000000000000..4a0b4813ec515 --- /dev/null +++ b/node_modules/npm-pick-manifest/node_modules/lru-cache/index.mjs @@ -0,0 +1,1227 @@ +const perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date + +const hasAbortController = typeof AbortController === 'function' + +// minimal backwards-compatibility polyfill +// this doesn't have nearly all the checks and whatnot that +// actual AbortController/Signal has, but it's enough for +// our purposes, and if used properly, behaves the same. +const AC = hasAbortController + ? AbortController + : class AbortController { + constructor() { + this.signal = new AS() + } + abort(reason = new Error('This operation was aborted')) { + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true + this.signal.dispatchEvent({ + type: 'abort', + target: this.signal, + }) + } + } + +const hasAbortSignal = typeof AbortSignal === 'function' +// Some polyfills put this on the AC class, not global +const hasACAbortSignal = typeof AC.AbortSignal === 'function' +const AS = hasAbortSignal + ? AbortSignal + : hasACAbortSignal + ? AC.AbortController + : class AbortSignal { + constructor() { + this.reason = undefined + this.aborted = false + this._listeners = [] + } + dispatchEvent(e) { + if (e.type === 'abort') { + this.aborted = true + this.onabort(e) + this._listeners.forEach(f => f(e), this) + } + } + onabort() {} + addEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners.push(fn) + } + } + removeEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners = this._listeners.filter(f => f !== fn) + } + } + } + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} + +const emitWarning = (...a) => { + typeof process === 'object' && + process && + typeof process.emitWarning === 'function' + ? process.emitWarning(...a) + : console.error(...a) +} + +const shouldWarn = code => !warned.has(code) + +const warn = (code, what, instead, fn) => { + warned.add(code) + const msg = `The ${what} is deprecated. 
Please use ${instead} instead.` + emitWarning(msg, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ? {} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? 
getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.max && !this.maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + getRemainingTTL(key) { + return this.has(key, { updateAgeOnHas: false }) ? 
Infinity : 0 + } + + initializeTTLTracking() { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + + this.setItemTTL = (index, ttl, start = perf.now()) => { + this.starts[index] = ttl !== 0 ? start : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + + this.updateItemAge = index => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout( + () => (cachedNow = 0), + this.ttlResolution + ) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + + this.getRemainingTTL = key => { + const index = this.keyMap.get(key) + if (index === undefined) { + return 0 + } + return this.ttls[index] === 0 || this.starts[index] === 0 + ? Infinity + : this.starts[index] + + this.ttls[index] - + (cachedNow || getNow()) + } + + this.isStale = index => { + return ( + this.ttls[index] !== 0 && + this.starts[index] !== 0 && + (cachedNow || getNow()) - this.starts[index] > + this.ttls[index] + ) + } + } + updateItemAge(_index) {} + statusTTL(_status, _index) {} + setItemTTL(_index, _ttl, _start) {} + isStale(_index) { + return false + } + + initializeSizeTracking() { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => { + this.calculatedSize -= this.sizes[index] + this.sizes[index] = 0 + } + this.requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + + 'must be set.' 
+ ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? 
oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + + get reset() { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length() { + deprecatedProperty('length', 'size') + return this.size + } + + static get AbortController() { + return AC + } + static get AbortSignal() { + return AS + } +} + +export default LRUCache diff --git a/node_modules/npm-pick-manifest/node_modules/lru-cache/package.json b/node_modules/npm-pick-manifest/node_modules/lru-cache/package.json new file mode 100644 index 0000000000000..9684991727e7a --- /dev/null +++ b/node_modules/npm-pick-manifest/node_modules/lru-cache/package.json @@ -0,0 +1,96 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "7.18.3", + "author": "Isaac Z. Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "sideEffects": false, + "scripts": { + "build": "npm run prepare", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "prepare": "node ./scripts/transpile-to-esm.js", + "size": "size-limit", + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write .", + "typedoc": "typedoc ./index.d.ts" + }, + "type": "commonjs", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", + "exports": { + ".": { + "import": { + "types": "./index.d.ts", + "default": "./index.mjs" + }, + "require": { + "types": "./index.d.ts", + "default": "./index.js" + } + }, + "./package.json": "./package.json" + }, + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "@size-limit/preset-small-lib": "^7.0.8", + "@types/node": "^17.0.31", + "@types/tap": "^15.0.6", + "benchmark": "^2.1.4", + "c8": "^7.11.2", + "clock-mock": "^1.0.6", + "eslint-config-prettier": "^8.5.0", + "prettier": "^2.6.2", + "size-limit": "^7.0.8", + "tap": "^16.3.4", + "ts-node": "^10.7.0", + "tslib": "^2.4.0", + "typedoc": "^0.23.24", + "typescript": "^4.6.4" + }, + "license": "ISC", + "files": [ + "index.js", + "index.mjs", + "index.d.ts" + ], + "engines": { + "node": ">=12" + }, + "prettier": { + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "tap": { + "nyc-arg": [ + "--include=index.js" + ], + "node-arg": [ + "--expose-gc", + "--require", + "ts-node/register" + ], + "ts": false + }, + "size-limit": [ + { + "path": "./index.js" + } + ] +} diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/LICENSE b/node_modules/npm-registry-fetch/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000000000..f785757cd63f8 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/index.js b/node_modules/npm-registry-fetch/node_modules/lru-cache/index.js new file mode 100644 index 0000000000000..48e99fe5e5a70 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/lru-cache/index.js @@ -0,0 +1,1227 @@ +const perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date + +const hasAbortController = typeof AbortController === 'function' + +// minimal backwards-compatibility polyfill +// this doesn't have nearly all the checks and whatnot that +// actual AbortController/Signal has, but it's enough for +// our purposes, and if used properly, behaves the same. +const AC = hasAbortController + ? AbortController + : class AbortController { + constructor() { + this.signal = new AS() + } + abort(reason = new Error('This operation was aborted')) { + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true + this.signal.dispatchEvent({ + type: 'abort', + target: this.signal, + }) + } + } + +const hasAbortSignal = typeof AbortSignal === 'function' +// Some polyfills put this on the AC class, not global +const hasACAbortSignal = typeof AC.AbortSignal === 'function' +const AS = hasAbortSignal + ? AbortSignal + : hasACAbortSignal + ? AC.AbortController + : class AbortSignal { + constructor() { + this.reason = undefined + this.aborted = false + this._listeners = [] + } + dispatchEvent(e) { + if (e.type === 'abort') { + this.aborted = true + this.onabort(e) + this._listeners.forEach(f => f(e), this) + } + } + onabort() {} + addEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners.push(fn) + } + } + removeEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners = this._listeners.filter(f => f !== fn) + } + } + } + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} + +const emitWarning = (...a) => { + typeof process === 'object' && + process && + typeof process.emitWarning === 'function' + ? process.emitWarning(...a) + : console.error(...a) +} + +const shouldWarn = code => !warned.has(code) + +const warn = (code, what, instead, fn) => { + warned.add(code) + const msg = `The ${what} is deprecated. Please use ${instead} instead.` + emitWarning(msg, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. 
+ * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ? {} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? 
getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.max && !this.maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + getRemainingTTL(key) { + return this.has(key, { updateAgeOnHas: false }) ? 
Infinity : 0 + } + + initializeTTLTracking() { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + + this.setItemTTL = (index, ttl, start = perf.now()) => { + this.starts[index] = ttl !== 0 ? start : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + + this.updateItemAge = index => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout( + () => (cachedNow = 0), + this.ttlResolution + ) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + + this.getRemainingTTL = key => { + const index = this.keyMap.get(key) + if (index === undefined) { + return 0 + } + return this.ttls[index] === 0 || this.starts[index] === 0 + ? Infinity + : this.starts[index] + + this.ttls[index] - + (cachedNow || getNow()) + } + + this.isStale = index => { + return ( + this.ttls[index] !== 0 && + this.starts[index] !== 0 && + (cachedNow || getNow()) - this.starts[index] > + this.ttls[index] + ) + } + } + updateItemAge(_index) {} + statusTTL(_status, _index) {} + setItemTTL(_index, _ttl, _start) {} + isStale(_index) { + return false + } + + initializeSizeTracking() { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => { + this.calculatedSize -= this.sizes[index] + this.sizes[index] = 0 + } + this.requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + + 'must be set.' 
+ ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? 
oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + + get reset() { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length() { + deprecatedProperty('length', 'size') + return this.size + } + + static get AbortController() { + return AC + } + static get AbortSignal() { + return AS + } +} + +module.exports = LRUCache diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/index.mjs b/node_modules/npm-registry-fetch/node_modules/lru-cache/index.mjs new file mode 100644 index 0000000000000..4a0b4813ec515 --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/lru-cache/index.mjs @@ -0,0 +1,1227 @@ +const perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date + +const hasAbortController = typeof AbortController === 'function' + +// minimal backwards-compatibility polyfill +// this doesn't have nearly all the checks and whatnot that +// actual AbortController/Signal has, but it's enough for +// our purposes, and if used properly, behaves the same. +const AC = hasAbortController + ? AbortController + : class AbortController { + constructor() { + this.signal = new AS() + } + abort(reason = new Error('This operation was aborted')) { + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true + this.signal.dispatchEvent({ + type: 'abort', + target: this.signal, + }) + } + } + +const hasAbortSignal = typeof AbortSignal === 'function' +// Some polyfills put this on the AC class, not global +const hasACAbortSignal = typeof AC.AbortSignal === 'function' +const AS = hasAbortSignal + ? AbortSignal + : hasACAbortSignal + ? AC.AbortController + : class AbortSignal { + constructor() { + this.reason = undefined + this.aborted = false + this._listeners = [] + } + dispatchEvent(e) { + if (e.type === 'abort') { + this.aborted = true + this.onabort(e) + this._listeners.forEach(f => f(e), this) + } + } + onabort() {} + addEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners.push(fn) + } + } + removeEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners = this._listeners.filter(f => f !== fn) + } + } + } + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} + +const emitWarning = (...a) => { + typeof process === 'object' && + process && + typeof process.emitWarning === 'function' + ? process.emitWarning(...a) + : console.error(...a) +} + +const shouldWarn = code => !warned.has(code) + +const warn = (code, what, instead, fn) => { + warned.add(code) + const msg = `The ${what} is deprecated. 
Please use ${instead} instead.` + emitWarning(msg, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ? {} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? 
getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.max && !this.maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + getRemainingTTL(key) { + return this.has(key, { updateAgeOnHas: false }) ? 
Infinity : 0 + } + + initializeTTLTracking() { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + + this.setItemTTL = (index, ttl, start = perf.now()) => { + this.starts[index] = ttl !== 0 ? start : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + + this.updateItemAge = index => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout( + () => (cachedNow = 0), + this.ttlResolution + ) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + + this.getRemainingTTL = key => { + const index = this.keyMap.get(key) + if (index === undefined) { + return 0 + } + return this.ttls[index] === 0 || this.starts[index] === 0 + ? Infinity + : this.starts[index] + + this.ttls[index] - + (cachedNow || getNow()) + } + + this.isStale = index => { + return ( + this.ttls[index] !== 0 && + this.starts[index] !== 0 && + (cachedNow || getNow()) - this.starts[index] > + this.ttls[index] + ) + } + } + updateItemAge(_index) {} + statusTTL(_status, _index) {} + setItemTTL(_index, _ttl, _start) {} + isStale(_index) { + return false + } + + initializeSizeTracking() { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => { + this.calculatedSize -= this.sizes[index] + this.sizes[index] = 0 + } + this.requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + + 'must be set.' 
+ ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? 
oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
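+// ---------------------------------------------------------------------------
+// [editorial aside — illustrative only; not part of the vendored lru-cache
+// source or of this diff. A minimal sketch of the fetch()/fetchMethod flow
+// implemented above, assuming lru-cache@7 is resolvable as 'lru-cache':
+//
+//   import LRUCache from 'lru-cache'
+//
+//   const cache = new LRUCache({
+//     max: 100,
+//     ttl: 60_000,          // entries go stale after one minute
+//     allowStale: true,     // fetch() may serve a stale value while refreshing
+//     fetchMethod: async (key, staleValue, { signal, options, context }) => {
+//       // while this promise is pending it is stored in the cache as a
+//       // "background fetch"; aborting via signal cancels or ignores it
+//       return { key, loadedAt: Date.now() }
+//     },
+//   })
+//
+//   const value = await cache.fetch('some-key')   // miss -> runs fetchMethod
+//   const again = await cache.fetch('some-key')   // hit  -> returns cached value
+// ]
+// ---------------------------------------------------------------------------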
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + + get reset() { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length() { + deprecatedProperty('length', 'size') + return this.size + } + + static get AbortController() { + return AC + } + static get AbortSignal() { + return AS + } +} + +export default LRUCache diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/package.json b/node_modules/npm-registry-fetch/node_modules/lru-cache/package.json new file mode 100644 index 0000000000000..9684991727e7a --- /dev/null +++ b/node_modules/npm-registry-fetch/node_modules/lru-cache/package.json @@ -0,0 +1,96 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "7.18.3", + "author": "Isaac Z. Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "sideEffects": false, + "scripts": { + "build": "npm run prepare", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "prepare": "node ./scripts/transpile-to-esm.js", + "size": "size-limit", + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write .", + "typedoc": "typedoc ./index.d.ts" + }, + "type": "commonjs", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", + "exports": { + ".": { + "import": { + "types": "./index.d.ts", + "default": "./index.mjs" + }, + "require": { + "types": "./index.d.ts", + "default": "./index.js" + } + }, + "./package.json": "./package.json" + }, + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "@size-limit/preset-small-lib": "^7.0.8", + "@types/node": "^17.0.31", + "@types/tap": "^15.0.6", + "benchmark": "^2.1.4", + "c8": "^7.11.2", + "clock-mock": "^1.0.6", + "eslint-config-prettier": "^8.5.0", + "prettier": "^2.6.2", + "size-limit": "^7.0.8", + "tap": "^16.3.4", + "ts-node": "^10.7.0", + "tslib": "^2.4.0", + "typedoc": "^0.23.24", + "typescript": "^4.6.4" + }, + "license": "ISC", + "files": [ + "index.js", + "index.mjs", + "index.d.ts" + ], + "engines": { + "node": ">=12" + }, + "prettier": { + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "tap": { + "nyc-arg": [ + "--include=index.js" + ], + "node-arg": [ + "--expose-gc", + "--require", + "ts-node/register" + ], + "ts": false + }, + "size-limit": [ + { + "path": "./index.js" + } + ] +} diff --git a/node_modules/pacote/node_modules/lru-cache/LICENSE b/node_modules/pacote/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000000000..f785757cd63f8 --- /dev/null +++ b/node_modules/pacote/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/pacote/node_modules/lru-cache/index.js b/node_modules/pacote/node_modules/lru-cache/index.js new file mode 100644 index 0000000000000..48e99fe5e5a70 --- /dev/null +++ b/node_modules/pacote/node_modules/lru-cache/index.js @@ -0,0 +1,1227 @@ +const perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date + +const hasAbortController = typeof AbortController === 'function' + +// minimal backwards-compatibility polyfill +// this doesn't have nearly all the checks and whatnot that +// actual AbortController/Signal has, but it's enough for +// our purposes, and if used properly, behaves the same. +const AC = hasAbortController + ? AbortController + : class AbortController { + constructor() { + this.signal = new AS() + } + abort(reason = new Error('This operation was aborted')) { + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true + this.signal.dispatchEvent({ + type: 'abort', + target: this.signal, + }) + } + } + +const hasAbortSignal = typeof AbortSignal === 'function' +// Some polyfills put this on the AC class, not global +const hasACAbortSignal = typeof AC.AbortSignal === 'function' +const AS = hasAbortSignal + ? AbortSignal + : hasACAbortSignal + ? AC.AbortController + : class AbortSignal { + constructor() { + this.reason = undefined + this.aborted = false + this._listeners = [] + } + dispatchEvent(e) { + if (e.type === 'abort') { + this.aborted = true + this.onabort(e) + this._listeners.forEach(f => f(e), this) + } + } + onabort() {} + addEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners.push(fn) + } + } + removeEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners = this._listeners.filter(f => f !== fn) + } + } + } + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} + +const emitWarning = (...a) => { + typeof process === 'object' && + process && + typeof process.emitWarning === 'function' + ? process.emitWarning(...a) + : console.error(...a) +} + +const shouldWarn = code => !warned.has(code) + +const warn = (code, what, instead, fn) => { + warned.add(code) + const msg = `The ${what} is deprecated. Please use ${instead} instead.` + emitWarning(msg, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. 
+ * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ? {} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if 
(this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.max && !this.maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + getRemainingTTL(key) { + return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0 + } + + initializeTTLTracking() { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + + this.setItemTTL = (index, ttl, start = perf.now()) => { + this.starts[index] = ttl !== 0 ? start : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + + this.updateItemAge = index => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout( + () => (cachedNow = 0), + this.ttlResolution + ) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + + this.getRemainingTTL = key => { + const index = this.keyMap.get(key) + if (index === undefined) { + return 0 + } + return this.ttls[index] === 0 || this.starts[index] === 0 + ? 
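+// [editorial aside — illustrative only; not part of the vendored source or of
+// this diff. The per-index ttls[]/starts[] arrays above back getRemainingTTL()
+// and isStale(); a sketch, assuming lru-cache@7:
+//
+//   const LRUCache = require('lru-cache')      // v7 exports the class itself
+//   const cache = new LRUCache({ max: 10, ttl: 5000 })
+//   cache.set('token', 'abc123')
+//   cache.getRemainingTTL('token')             // ~5000 ms until stale
+//   cache.set('pinned', 'xyz', { ttl: 0 })     // per-entry ttl 0 = no expiry
+//   cache.getRemainingTTL('pinned')            // Infinity
+// ]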
Infinity + : this.starts[index] + + this.ttls[index] - + (cachedNow || getNow()) + } + + this.isStale = index => { + return ( + this.ttls[index] !== 0 && + this.starts[index] !== 0 && + (cachedNow || getNow()) - this.starts[index] > + this.ttls[index] + ) + } + } + updateItemAge(_index) {} + statusTTL(_status, _index) {} + setItemTTL(_index, _ttl, _start) {} + isStale(_index) { + return false + } + + initializeSizeTracking() { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => { + this.calculatedSize -= this.sizes[index] + this.sizes[index] = 0 + } + this.requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + + 'must be set.' + ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + 
!this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? 
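+// [editorial aside — illustrative only; not part of the vendored source or of
+// this diff. Iteration sketch for the generators above, assuming lru-cache@7;
+// indexes() walks most-recently-used first, rindexes() least-recently-used
+// first:
+//
+//   const cache = new LRUCache({ max: 3 })
+//   cache.set('a', 1).set('b', 2).set('c', 3)
+//   [...cache.entries()]     // [['c', 3], ['b', 2], ['a', 1]]  (most -> least recent)
+//   [...cache.rkeys()]       // ['a', 'b', 'c']                 (least -> most recent)
+//   cache.find(v => v > 1)   // 3, the first match in most-recent-first order
+// ]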
undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
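+// [editorial aside — illustrative only; not part of the vendored source or of
+// this diff. A size-bounded sketch exercising the addItemSize()/evict() logic
+// above, assuming lru-cache@7:
+//
+//   const LRUCache = require('lru-cache')
+//   const cache = new LRUCache({
+//     maxSize: 1000,                                  // cap on total calculated size
+//     sizeCalculation: (value, key) => value.length,  // must return a positive integer
+//     dispose: (value, key, reason) => {
+//       // reason is 'evict', 'set', or 'delete'
+//     },
+//   })
+//   cache.set('big', 'x'.repeat(900))
+//   cache.set('more', 'y'.repeat(200))   // pushes calculatedSize past maxSize -> evicts 'big'
+//   cache.has('big')                     // false
+// ]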
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + + get reset() { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length() { + deprecatedProperty('length', 'size') + return this.size + } + + static get AbortController() { + return AC + } + static get AbortSignal() { + return AS + } +} + +module.exports = LRUCache diff --git a/node_modules/pacote/node_modules/lru-cache/index.mjs b/node_modules/pacote/node_modules/lru-cache/index.mjs new file mode 100644 index 0000000000000..4a0b4813ec515 --- /dev/null +++ b/node_modules/pacote/node_modules/lru-cache/index.mjs @@ -0,0 +1,1227 @@ +const perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date + +const hasAbortController = typeof AbortController === 'function' + +// minimal backwards-compatibility polyfill +// this doesn't have nearly all the checks and whatnot that +// actual AbortController/Signal has, but it's enough for +// our purposes, and if used properly, behaves the same. +const AC = hasAbortController + ? AbortController + : class AbortController { + constructor() { + this.signal = new AS() + } + abort(reason = new Error('This operation was aborted')) { + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true + this.signal.dispatchEvent({ + type: 'abort', + target: this.signal, + }) + } + } + +const hasAbortSignal = typeof AbortSignal === 'function' +// Some polyfills put this on the AC class, not global +const hasACAbortSignal = typeof AC.AbortSignal === 'function' +const AS = hasAbortSignal + ? AbortSignal + : hasACAbortSignal + ? AC.AbortController + : class AbortSignal { + constructor() { + this.reason = undefined + this.aborted = false + this._listeners = [] + } + dispatchEvent(e) { + if (e.type === 'abort') { + this.aborted = true + this.onabort(e) + this._listeners.forEach(f => f(e), this) + } + } + onabort() {} + addEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners.push(fn) + } + } + removeEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners = this._listeners.filter(f => f !== fn) + } + } + } + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} + +const emitWarning = (...a) => { + typeof process === 'object' && + process && + typeof process.emitWarning === 'function' + ? process.emitWarning(...a) + : console.error(...a) +} + +const shouldWarn = code => !warned.has(code) + +const warn = (code, what, instead, fn) => { + warned.add(code) + const msg = `The ${what} is deprecated. 
Please use ${instead} instead.` + emitWarning(msg, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ? {} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? 
getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.max && !this.maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + getRemainingTTL(key) { + return this.has(key, { updateAgeOnHas: false }) ? 
Infinity : 0 + } + + initializeTTLTracking() { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + + this.setItemTTL = (index, ttl, start = perf.now()) => { + this.starts[index] = ttl !== 0 ? start : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + + this.updateItemAge = index => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout( + () => (cachedNow = 0), + this.ttlResolution + ) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + + this.getRemainingTTL = key => { + const index = this.keyMap.get(key) + if (index === undefined) { + return 0 + } + return this.ttls[index] === 0 || this.starts[index] === 0 + ? Infinity + : this.starts[index] + + this.ttls[index] - + (cachedNow || getNow()) + } + + this.isStale = index => { + return ( + this.ttls[index] !== 0 && + this.starts[index] !== 0 && + (cachedNow || getNow()) - this.starts[index] > + this.ttls[index] + ) + } + } + updateItemAge(_index) {} + statusTTL(_status, _index) {} + setItemTTL(_index, _ttl, _start) {} + isStale(_index) { + return false + } + + initializeSizeTracking() { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => { + this.calculatedSize -= this.sizes[index] + this.sizes[index] = 0 + } + this.requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + + 'must be set.' 
+ ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? 
oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + + get reset() { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length() { + deprecatedProperty('length', 'size') + return this.size + } + + static get AbortController() { + return AC + } + static get AbortSignal() { + return AS + } +} + +export default LRUCache diff --git a/node_modules/pacote/node_modules/lru-cache/package.json b/node_modules/pacote/node_modules/lru-cache/package.json new file mode 100644 index 0000000000000..9684991727e7a --- /dev/null +++ b/node_modules/pacote/node_modules/lru-cache/package.json @@ -0,0 +1,96 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "7.18.3", + "author": "Isaac Z. Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "sideEffects": false, + "scripts": { + "build": "npm run prepare", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "prepare": "node ./scripts/transpile-to-esm.js", + "size": "size-limit", + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write .", + "typedoc": "typedoc ./index.d.ts" + }, + "type": "commonjs", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", + "exports": { + ".": { + "import": { + "types": "./index.d.ts", + "default": "./index.mjs" + }, + "require": { + "types": "./index.d.ts", + "default": "./index.js" + } + }, + "./package.json": "./package.json" + }, + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "@size-limit/preset-small-lib": "^7.0.8", + "@types/node": "^17.0.31", + "@types/tap": "^15.0.6", + "benchmark": "^2.1.4", + "c8": "^7.11.2", + "clock-mock": "^1.0.6", + "eslint-config-prettier": "^8.5.0", + "prettier": "^2.6.2", + "size-limit": "^7.0.8", + "tap": "^16.3.4", + "ts-node": "^10.7.0", + "tslib": "^2.4.0", + "typedoc": "^0.23.24", + "typescript": "^4.6.4" + }, + "license": "ISC", + "files": [ + "index.js", + "index.mjs", + "index.d.ts" + ], + "engines": { + "node": ">=12" + }, + "prettier": { + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "tap": { + "nyc-arg": [ + "--include=index.js" + ], + "node-arg": [ + "--expose-gc", + "--require", + "ts-node/register" + ], + "ts": false + }, + "size-limit": [ + { + "path": "./index.js" + } + ] +} diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.js b/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.js deleted file mode 100644 index 02d76ec800a92..0000000000000 --- a/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.js +++ /dev/null @@ -1,1404 +0,0 @@ -"use strict"; -/** - * @module LRUCache - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.LRUCache = void 0; -const perf = typeof performance === 'object' && - performance && - typeof performance.now === 'function' - ? performance - : Date; -const warned = new Set(); -/* c8 ignore start */ -const PROCESS = (typeof process === 'object' && !!process ? 
process : {}); -/* c8 ignore start */ -const emitWarning = (msg, type, code, fn) => { - typeof PROCESS.emitWarning === 'function' - ? PROCESS.emitWarning(msg, type, code, fn) - : console.error(`[${code}] ${type}: ${msg}`); -}; -let AC = globalThis.AbortController; -let AS = globalThis.AbortSignal; -/* c8 ignore start */ -if (typeof AC === 'undefined') { - //@ts-ignore - AS = class AbortSignal { - onabort; - _onabort = []; - reason; - aborted = false; - addEventListener(_, fn) { - this._onabort.push(fn); - } - }; - //@ts-ignore - AC = class AbortController { - constructor() { - warnACPolyfill(); - } - signal = new AS(); - abort(reason) { - if (this.signal.aborted) - return; - //@ts-ignore - this.signal.reason = reason; - //@ts-ignore - this.signal.aborted = true; - //@ts-ignore - for (const fn of this.signal._onabort) { - fn(reason); - } - this.signal.onabort?.(reason); - } - }; - let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; - const warnACPolyfill = () => { - if (!printACPolyfillWarning) - return; - printACPolyfillWarning = false; - emitWarning('AbortController is not defined. If using lru-cache in ' + - 'node 14, load an AbortController polyfill from the ' + - '`node-abort-controller` package. A minimal polyfill is ' + - 'provided for use by LRUCache.fetch(), but it should not be ' + - 'relied upon in other contexts (eg, passing it to other APIs that ' + - 'use AbortController/AbortSignal might have undesirable effects). ' + - 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); - }; -} -/* c8 ignore stop */ -const shouldWarn = (code) => !warned.has(code); -const TYPE = Symbol('type'); -const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); -/* c8 ignore start */ -// This is a little bit ridiculous, tbh. -// The maximum array length is 2^32-1 or thereabouts on most JS impls. -// And well before that point, you're caching the entire world, I mean, -// that's ~32GB of just integers for the next/prev links, plus whatever -// else to hold that many keys and values. Just filling the memory with -// zeroes at init time is brutal when you get that big. -// But why not be complete? -// Maybe in the future, these limits will have expanded. -const getUintArray = (max) => !isPosInt(max) - ? null - : max <= Math.pow(2, 8) - ? Uint8Array - : max <= Math.pow(2, 16) - ? Uint16Array - : max <= Math.pow(2, 32) - ? Uint32Array - : max <= Number.MAX_SAFE_INTEGER - ? ZeroArray - : null; -/* c8 ignore stop */ -class ZeroArray extends Array { - constructor(size) { - super(size); - this.fill(0); - } -} -class Stack { - heap; - length; - // private constructor - static #constructing = false; - static create(max) { - const HeapCls = getUintArray(max); - if (!HeapCls) - return []; - Stack.#constructing = true; - const s = new Stack(max, HeapCls); - Stack.#constructing = false; - return s; - } - constructor(max, HeapCls) { - /* c8 ignore start */ - if (!Stack.#constructing) { - throw new TypeError('instantiate Stack using Stack.create(n)'); - } - /* c8 ignore stop */ - this.heap = new HeapCls(max); - this.length = 0; - } - push(n) { - this.heap[this.length++] = n; - } - pop() { - return this.heap[--this.length]; - } -} -/** - * Default export, the thing you're using this module to get. - * - * All properties from the options object (with the exception of - * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as - * normal public members. 
(`max` and `maxBase` are read-only getters.) - * Changing any of these will alter the defaults for subsequent method calls, - * but is otherwise safe. - */ -class LRUCache { - // properties coming in from the options of these, only max and maxSize - // really *need* to be protected. The rest can be modified, as they just - // set defaults for various methods. - #max; - #maxSize; - #dispose; - #disposeAfter; - #fetchMethod; - /** - * {@link LRUCache.OptionsBase.ttl} - */ - ttl; - /** - * {@link LRUCache.OptionsBase.ttlResolution} - */ - ttlResolution; - /** - * {@link LRUCache.OptionsBase.ttlAutopurge} - */ - ttlAutopurge; - /** - * {@link LRUCache.OptionsBase.updateAgeOnGet} - */ - updateAgeOnGet; - /** - * {@link LRUCache.OptionsBase.updateAgeOnHas} - */ - updateAgeOnHas; - /** - * {@link LRUCache.OptionsBase.allowStale} - */ - allowStale; - /** - * {@link LRUCache.OptionsBase.noDisposeOnSet} - */ - noDisposeOnSet; - /** - * {@link LRUCache.OptionsBase.noUpdateTTL} - */ - noUpdateTTL; - /** - * {@link LRUCache.OptionsBase.maxEntrySize} - */ - maxEntrySize; - /** - * {@link LRUCache.OptionsBase.sizeCalculation} - */ - sizeCalculation; - /** - * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} - */ - noDeleteOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} - */ - noDeleteOnStaleGet; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} - */ - allowStaleOnFetchAbort; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} - */ - allowStaleOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.ignoreFetchAbort} - */ - ignoreFetchAbort; - // computed properties - #size; - #calculatedSize; - #keyMap; - #keyList; - #valList; - #next; - #prev; - #head; - #tail; - #free; - #disposed; - #sizes; - #starts; - #ttls; - #hasDispose; - #hasFetchMethod; - #hasDisposeAfter; - /** - * Do not call this method unless you need to inspect the - * inner workings of the cache. If anything returned by this - * object is modified in any way, strange breakage may occur. - * - * These fields are private for a reason! 
- * - * @internal - */ - static unsafeExposeInternals(c) { - return { - // properties - starts: c.#starts, - ttls: c.#ttls, - sizes: c.#sizes, - keyMap: c.#keyMap, - keyList: c.#keyList, - valList: c.#valList, - next: c.#next, - prev: c.#prev, - get head() { - return c.#head; - }, - get tail() { - return c.#tail; - }, - free: c.#free, - // methods - isBackgroundFetch: (p) => c.#isBackgroundFetch(p), - backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), - moveToTail: (index) => c.#moveToTail(index), - indexes: (options) => c.#indexes(options), - rindexes: (options) => c.#rindexes(options), - isStale: (index) => c.#isStale(index), - }; - } - // Protected read-only members - /** - * {@link LRUCache.OptionsBase.max} (read-only) - */ - get max() { - return this.#max; - } - /** - * {@link LRUCache.OptionsBase.maxSize} (read-only) - */ - get maxSize() { - return this.#maxSize; - } - /** - * The total computed size of items in the cache (read-only) - */ - get calculatedSize() { - return this.#calculatedSize; - } - /** - * The number of items stored in the cache (read-only) - */ - get size() { - return this.#size; - } - /** - * {@link LRUCache.OptionsBase.fetchMethod} (read-only) - */ - get fetchMethod() { - return this.#fetchMethod; - } - /** - * {@link LRUCache.OptionsBase.dispose} (read-only) - */ - get dispose() { - return this.#dispose; - } - /** - * {@link LRUCache.OptionsBase.disposeAfter} (read-only) - */ - get disposeAfter() { - return this.#disposeAfter; - } - constructor(options) { - const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; - if (max !== 0 && !isPosInt(max)) { - throw new TypeError('max option must be a nonnegative integer'); - } - const UintArray = max ? 
getUintArray(max) : Array; - if (!UintArray) { - throw new Error('invalid max value: ' + max); - } - this.#max = max; - this.#maxSize = maxSize; - this.maxEntrySize = maxEntrySize || this.#maxSize; - this.sizeCalculation = sizeCalculation; - if (this.sizeCalculation) { - if (!this.#maxSize && !this.maxEntrySize) { - throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation set to non-function'); - } - } - if (fetchMethod !== undefined && - typeof fetchMethod !== 'function') { - throw new TypeError('fetchMethod must be a function if specified'); - } - this.#fetchMethod = fetchMethod; - this.#hasFetchMethod = !!fetchMethod; - this.#keyMap = new Map(); - this.#keyList = new Array(max).fill(undefined); - this.#valList = new Array(max).fill(undefined); - this.#next = new UintArray(max); - this.#prev = new UintArray(max); - this.#head = 0; - this.#tail = 0; - this.#free = Stack.create(max); - this.#size = 0; - this.#calculatedSize = 0; - if (typeof dispose === 'function') { - this.#dispose = dispose; - } - if (typeof disposeAfter === 'function') { - this.#disposeAfter = disposeAfter; - this.#disposed = []; - } - else { - this.#disposeAfter = undefined; - this.#disposed = undefined; - } - this.#hasDispose = !!this.#dispose; - this.#hasDisposeAfter = !!this.#disposeAfter; - this.noDisposeOnSet = !!noDisposeOnSet; - this.noUpdateTTL = !!noUpdateTTL; - this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; - this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; - this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; - this.ignoreFetchAbort = !!ignoreFetchAbort; - // NB: maxEntrySize is set to maxSize if it's set - if (this.maxEntrySize !== 0) { - if (this.#maxSize !== 0) { - if (!isPosInt(this.#maxSize)) { - throw new TypeError('maxSize must be a positive integer if specified'); - } - } - if (!isPosInt(this.maxEntrySize)) { - throw new TypeError('maxEntrySize must be a positive integer if specified'); - } - this.#initializeSizeTracking(); - } - this.allowStale = !!allowStale; - this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; - this.updateAgeOnGet = !!updateAgeOnGet; - this.updateAgeOnHas = !!updateAgeOnHas; - this.ttlResolution = - isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution - : 1; - this.ttlAutopurge = !!ttlAutopurge; - this.ttl = ttl || 0; - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError('ttl must be a positive integer if specified'); - } - this.#initializeTTLTracking(); - } - // do not allow completely unbounded caches - if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { - throw new TypeError('At least one of max, maxSize, or ttl is required'); - } - if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { - const code = 'LRU_CACHE_UNBOUNDED'; - if (shouldWarn(code)) { - warned.add(code); - const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + - 'result in unbounded memory consumption.'; - emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); - } - } - } - /** - * Return the remaining TTL time for a given entry key - */ - getRemainingTTL(key) { - return this.#keyMap.has(key) ? Infinity : 0; - } - #initializeTTLTracking() { - const ttls = new ZeroArray(this.#max); - const starts = new ZeroArray(this.#max); - this.#ttls = ttls; - this.#starts = starts; - this.#setItemTTL = (index, ttl, start = perf.now()) => { - starts[index] = ttl !== 0 ? 
start : 0; - ttls[index] = ttl; - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.#isStale(index)) { - this.delete(this.#keyList[index]); - } - }, ttl + 1); - // unref() not supported on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - }; - this.#updateItemAge = index => { - starts[index] = ttls[index] !== 0 ? perf.now() : 0; - }; - this.#statusTTL = (status, index) => { - if (ttls[index]) { - const ttl = ttls[index]; - const start = starts[index]; - status.ttl = ttl; - status.start = start; - status.now = cachedNow || getNow(); - const age = status.now - start; - status.remainingTTL = ttl - age; - } - }; - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0; - const getNow = () => { - const n = perf.now(); - if (this.ttlResolution > 0) { - cachedNow = n; - const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); - // not available on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - return n; - }; - this.getRemainingTTL = key => { - const index = this.#keyMap.get(key); - if (index === undefined) { - return 0; - } - const ttl = ttls[index]; - const start = starts[index]; - if (ttl === 0 || start === 0) { - return Infinity; - } - const age = (cachedNow || getNow()) - start; - return ttl - age; - }; - this.#isStale = index => { - return (ttls[index] !== 0 && - starts[index] !== 0 && - (cachedNow || getNow()) - starts[index] > ttls[index]); - }; - } - // conditionally set private methods related to TTL - #updateItemAge = () => { }; - #statusTTL = () => { }; - #setItemTTL = () => { }; - /* c8 ignore stop */ - #isStale = () => false; - #initializeSizeTracking() { - const sizes = new ZeroArray(this.#max); - this.#calculatedSize = 0; - this.#sizes = sizes; - this.#removeItemSize = index => { - this.#calculatedSize -= sizes[index]; - sizes[index] = 0; - }; - this.#requireSize = (k, v, size, sizeCalculation) => { - // provisionally accept background fetches. - // actual value size will be checked when they return. - if (this.#isBackgroundFetch(v)) { - return 0; - } - if (!isPosInt(size)) { - if (sizeCalculation) { - if (typeof sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation must be a function'); - } - size = sizeCalculation(v, k); - if (!isPosInt(size)) { - throw new TypeError('sizeCalculation return invalid (expect positive integer)'); - } - } - else { - throw new TypeError('invalid size value (must be positive integer). 
' + - 'When maxSize or maxEntrySize is used, sizeCalculation ' + - 'or size must be set.'); - } - } - return size; - }; - this.#addItemSize = (index, size, status) => { - sizes[index] = size; - if (this.#maxSize) { - const maxSize = this.#maxSize - sizes[index]; - while (this.#calculatedSize > maxSize) { - this.#evict(true); - } - } - this.#calculatedSize += sizes[index]; - if (status) { - status.entrySize = size; - status.totalCalculatedSize = this.#calculatedSize; - } - }; - } - #removeItemSize = _i => { }; - #addItemSize = (_i, _s, _st) => { }; - #requireSize = (_k, _v, size, sizeCalculation) => { - if (size || sizeCalculation) { - throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); - } - return 0; - }; - *#indexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#tail; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#head) { - break; - } - else { - i = this.#prev[i]; - } - } - } - } - *#rindexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#head; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#tail) { - break; - } - else { - i = this.#next[i]; - } - } - } - } - #isValidIndex(index) { - return (index !== undefined && - this.#keyMap.get(this.#keyList[index]) === index); - } - /** - * Return a generator yielding `[key, value]` pairs, - * in order from most recently used to least recently used. - */ - *entries() { - for (const i of this.#indexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Inverse order version of {@link LRUCache.entries} - * - * Return a generator yielding `[key, value]` pairs, - * in order from least recently used to most recently used. - */ - *rentries() { - for (const i of this.#rindexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Return a generator yielding the keys in the cache, - * in order from most recently used to least recently used. - */ - *keys() { - for (const i of this.#indexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Inverse order version of {@link LRUCache.keys} - * - * Return a generator yielding the keys in the cache, - * in order from least recently used to most recently used. - */ - *rkeys() { - for (const i of this.#rindexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Return a generator yielding the values in the cache, - * in order from most recently used to least recently used. - */ - *values() { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Inverse order version of {@link LRUCache.values} - * - * Return a generator yielding the values in the cache, - * in order from least recently used to most recently used. 
- */ - *rvalues() { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Iterating over the cache itself yields the same results as - * {@link LRUCache.entries} - */ - [Symbol.iterator]() { - return this.entries(); - } - /** - * Find a value for which the supplied fn method returns a truthy value, - * similar to Array.find(). fn is called as fn(value, key, cache). - */ - find(fn, getOptions = {}) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - if (fn(value, this.#keyList[i], this)) { - return this.get(this.#keyList[i], getOptions); - } - } - } - /** - * Call the supplied function on each item in the cache, in order from - * most recently used to least recently used. fn is called as - * fn(value, key, cache). Does not update age or recenty of use. - * Does not iterate over stale values. - */ - forEach(fn, thisp = this) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * The same as {@link LRUCache.forEach} but items are iterated over in - * reverse order. (ie, less recently used items are iterated over first.) - */ - rforEach(fn, thisp = this) { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * Delete any stale entries. Returns true if anything was removed, - * false otherwise. - */ - purgeStale() { - let deleted = false; - for (const i of this.#rindexes({ allowStale: true })) { - if (this.#isStale(i)) { - this.delete(this.#keyList[i]); - deleted = true; - } - } - return deleted; - } - /** - * Return an array of [key, {@link LRUCache.Entry}] tuples which can be - * passed to cache.load() - */ - dump() { - const arr = []; - for (const i of this.#indexes({ allowStale: true })) { - const key = this.#keyList[i]; - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined || key === undefined) - continue; - const entry = { value }; - if (this.#ttls && this.#starts) { - entry.ttl = this.#ttls[i]; - // always dump the start relative to a portable timestamp - // it's ok for this to be a bit slow, it's a rare operation. - const age = perf.now() - this.#starts[i]; - entry.start = Math.floor(Date.now() - age); - } - if (this.#sizes) { - entry.size = this.#sizes[i]; - } - arr.unshift([key, entry]); - } - return arr; - } - /** - * Reset the cache and load in the items in entries in the order listed. - * Note that the shape of the resulting cache may be different if the - * same options are not used in both caches. - */ - load(arr) { - this.clear(); - for (const [key, entry] of arr) { - if (entry.start) { - // entry.start is a portable timestamp, but we may be using - // node's performance.now(), so calculate the offset, so that - // we get the intended remaining TTL, no matter how long it's - // been on ice. - // - // it's ok for this to be a bit slow, it's a rare operation. 
- const age = Date.now() - entry.start; - entry.start = perf.now() - age; - } - this.set(key, entry.value, entry); - } - } - /** - * Add a value to the cache. - * - * Note: if `undefined` is specified as a value, this is an alias for - * {@link LRUCache#delete} - */ - set(k, v, setOptions = {}) { - if (v === undefined) { - this.delete(k); - return this; - } - const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; - let { noUpdateTTL = this.noUpdateTTL } = setOptions; - const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); - // if the item doesn't fit, don't do anything - // NB: maxEntrySize set to maxSize by default - if (this.maxEntrySize && size > this.maxEntrySize) { - if (status) { - status.set = 'miss'; - status.maxEntrySizeExceeded = true; - } - // have to delete, in case something is there already. - this.delete(k); - return this; - } - let index = this.#size === 0 ? undefined : this.#keyMap.get(k); - if (index === undefined) { - // addition - index = (this.#size === 0 - ? this.#tail - : this.#free.length !== 0 - ? this.#free.pop() - : this.#size === this.#max - ? this.#evict(false) - : this.#size); - this.#keyList[index] = k; - this.#valList[index] = v; - this.#keyMap.set(k, index); - this.#next[this.#tail] = index; - this.#prev[index] = this.#tail; - this.#tail = index; - this.#size++; - this.#addItemSize(index, size, status); - if (status) - status.set = 'add'; - noUpdateTTL = false; - } - else { - // update - this.#moveToTail(index); - const oldVal = this.#valList[index]; - if (v !== oldVal) { - if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { - oldVal.__abortController.abort(new Error('replaced')); - const { __staleWhileFetching: s } = oldVal; - if (s !== undefined && !noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(s, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([s, k, 'set']); - } - } - } - else if (!noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(oldVal, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([oldVal, k, 'set']); - } - } - this.#removeItemSize(index); - this.#addItemSize(index, size, status); - this.#valList[index] = v; - if (status) { - status.set = 'replace'; - const oldValue = oldVal && this.#isBackgroundFetch(oldVal) - ? oldVal.__staleWhileFetching - : oldVal; - if (oldValue !== undefined) - status.oldValue = oldValue; - } - } - else if (status) { - status.set = 'update'; - } - } - if (ttl !== 0 && !this.#ttls) { - this.#initializeTTLTracking(); - } - if (this.#ttls) { - if (!noUpdateTTL) { - this.#setItemTTL(index, ttl, start); - } - if (status) - this.#statusTTL(status, index); - } - if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return this; - } - /** - * Evict the least recently used item, returning its value or - * `undefined` if cache is empty. 
- */ - pop() { - try { - while (this.#size) { - const val = this.#valList[this.#head]; - this.#evict(true); - if (this.#isBackgroundFetch(val)) { - if (val.__staleWhileFetching) { - return val.__staleWhileFetching; - } - } - else if (val !== undefined) { - return val; - } - } - } - finally { - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } - } - #evict(free) { - const head = this.#head; - const k = this.#keyList[head]; - const v = this.#valList[head]; - if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('evicted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'evict'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'evict']); - } - } - this.#removeItemSize(head); - // if we aren't about to use the index, then null these out - if (free) { - this.#keyList[head] = undefined; - this.#valList[head] = undefined; - this.#free.push(head); - } - if (this.#size === 1) { - this.#head = this.#tail = 0; - this.#free.length = 0; - } - else { - this.#head = this.#next[head]; - } - this.#keyMap.delete(k); - this.#size--; - return head; - } - /** - * Check if a key is in the cache, without updating the recency of use. - * Will return false if the item is stale, even though it is technically - * in the cache. - * - * Will not update item age unless - * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. - */ - has(k, hasOptions = {}) { - const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v) && - v.__staleWhileFetching === undefined) { - return false; - } - if (!this.#isStale(index)) { - if (updateAgeOnHas) { - this.#updateItemAge(index); - } - if (status) { - status.has = 'hit'; - this.#statusTTL(status, index); - } - return true; - } - else if (status) { - status.has = 'stale'; - this.#statusTTL(status, index); - } - } - else if (status) { - status.has = 'miss'; - } - return false; - } - /** - * Like {@link LRUCache#get} but doesn't update recency or delete stale - * items. - * - * Returns `undefined` if the item is stale, unless - * {@link LRUCache.OptionsBase.allowStale} is set. - */ - peek(k, peekOptions = {}) { - const { allowStale = this.allowStale } = peekOptions; - const index = this.#keyMap.get(k); - if (index !== undefined && - (allowStale || !this.#isStale(index))) { - const v = this.#valList[index]; - // either stale and allowed, or forcing a refresh of non-stale value - return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; - } - } - #backgroundFetch(k, index, options, context) { - const v = index === undefined ? undefined : this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - return v; - } - const ac = new AC(); - const { signal } = options; - // when/if our AC signals, then stop listening to theirs. 
- signal?.addEventListener('abort', () => ac.abort(signal.reason), { - signal: ac.signal, - }); - const fetchOpts = { - signal: ac.signal, - options, - context, - }; - const cb = (v, updateCache = false) => { - const { aborted } = ac.signal; - const ignoreAbort = options.ignoreFetchAbort && v !== undefined; - if (options.status) { - if (aborted && !updateCache) { - options.status.fetchAborted = true; - options.status.fetchError = ac.signal.reason; - if (ignoreAbort) - options.status.fetchAbortIgnored = true; - } - else { - options.status.fetchResolved = true; - } - } - if (aborted && !ignoreAbort && !updateCache) { - return fetchFail(ac.signal.reason); - } - // either we didn't abort, and are still here, or we did, and ignored - const bf = p; - if (this.#valList[index] === p) { - if (v === undefined) { - if (bf.__staleWhileFetching) { - this.#valList[index] = bf.__staleWhileFetching; - } - else { - this.delete(k); - } - } - else { - if (options.status) - options.status.fetchUpdated = true; - this.set(k, v, fetchOpts.options); - } - } - return v; - }; - const eb = (er) => { - if (options.status) { - options.status.fetchRejected = true; - options.status.fetchError = er; - } - return fetchFail(er); - }; - const fetchFail = (er) => { - const { aborted } = ac.signal; - const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; - const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; - const noDelete = allowStale || options.noDeleteOnFetchRejection; - const bf = p; - if (this.#valList[index] === p) { - // if we allow stale on fetch rejections, then we need to ensure that - // the stale value is not removed from the cache when the fetch fails. - const del = !noDelete || bf.__staleWhileFetching === undefined; - if (del) { - this.delete(k); - } - else if (!allowStaleAborted) { - // still replace the *promise* with the stale value, - // since we are done with the promise at this point. - // leave it untouched if we're still waiting for an - // aborted background fetch that hasn't yet returned. - this.#valList[index] = bf.__staleWhileFetching; - } - } - if (allowStale) { - if (options.status && bf.__staleWhileFetching !== undefined) { - options.status.returnedStale = true; - } - return bf.__staleWhileFetching; - } - else if (bf.__returned === bf) { - throw er; - } - }; - const pcall = (res, rej) => { - const fmp = this.#fetchMethod?.(k, v, fetchOpts); - if (fmp && fmp instanceof Promise) { - fmp.then(v => res(v === undefined ? undefined : v), rej); - } - // ignored, we go until we finish, regardless. - // defer check until we are actually aborting, - // so fetchMethod can override. - ac.signal.addEventListener('abort', () => { - if (!options.ignoreFetchAbort || - options.allowStaleOnFetchAbort) { - res(undefined); - // when it eventually resolves, update the cache. - if (options.allowStaleOnFetchAbort) { - res = v => cb(v, true); - } - } - }); - }; - if (options.status) - options.status.fetchDispatched = true; - const p = new Promise(pcall).then(cb, eb); - const bf = Object.assign(p, { - __abortController: ac, - __staleWhileFetching: v, - __returned: undefined, - }); - if (index === undefined) { - // internal, don't expose status. 
- this.set(k, bf, { ...fetchOpts.options, status: undefined }); - index = this.#keyMap.get(k); - } - else { - this.#valList[index] = bf; - } - return bf; - } - #isBackgroundFetch(p) { - if (!this.#hasFetchMethod) - return false; - const b = p; - return (!!b && - b instanceof Promise && - b.hasOwnProperty('__staleWhileFetching') && - b.__abortController instanceof AC); - } - async fetch(k, fetchOptions = {}) { - const { - // get options - allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, - // set options - ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, - // fetch exclusive options - noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; - if (!this.#hasFetchMethod) { - if (status) - status.fetch = 'get'; - return this.get(k, { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - status, - }); - } - const options = { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - ttl, - noDisposeOnSet, - size, - sizeCalculation, - noUpdateTTL, - noDeleteOnFetchRejection, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - status, - signal, - }; - let index = this.#keyMap.get(k); - if (index === undefined) { - if (status) - status.fetch = 'miss'; - const p = this.#backgroundFetch(k, index, options, context); - return (p.__returned = p); - } - else { - // in cache, maybe already fetching - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - const stale = allowStale && v.__staleWhileFetching !== undefined; - if (status) { - status.fetch = 'inflight'; - if (stale) - status.returnedStale = true; - } - return stale ? v.__staleWhileFetching : (v.__returned = v); - } - // if we force a refresh, that means do NOT serve the cached value, - // unless we are already in the process of refreshing the cache. - const isStale = this.#isStale(index); - if (!forceRefresh && !isStale) { - if (status) - status.fetch = 'hit'; - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - if (status) - this.#statusTTL(status, index); - return v; - } - // ok, it is stale or a forced refresh, and not already fetching. - // refresh the cache. - const p = this.#backgroundFetch(k, index, options, context); - const hasStale = p.__staleWhileFetching !== undefined; - const staleVal = hasStale && allowStale; - if (status) { - status.fetch = isStale ? 'stale' : 'refresh'; - if (staleVal && isStale) - status.returnedStale = true; - } - return staleVal ? p.__staleWhileFetching : (p.__returned = p); - } - } - /** - * Return a value from the cache. Will update the recency of the cache - * entry found. - * - * If the key is not found, get() will return `undefined`. 
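The fetch() implementation removed above layers an async fetchMethod over the plain get() whose body follows, sharing one in-flight request per key and optionally serving a stale value while a refresh runs. A sketch of how a consumer might drive it, based on that deleted source; the registry URL and the fetchMethod body are hypothetical:

const { LRUCache } = require('lru-cache')

const manifests = new LRUCache({
  max: 500,
  ttl: 5 * 60 * 1000,
  allowStale: true, // permit returning a stale value while the refresh completes in the background
  // fetchMethod is called as (key, staleValue, { signal, options, context })
  fetchMethod: async (key, staleValue, { signal }) => {
    const res = await fetch(`https://registry.example/${key}`, { signal })
    return res.json()
  },
})

async function manifest(name) {
  // resolves from cache on a fresh hit, otherwise waits on (or joins) the background fetch
  return manifests.fetch(name)
}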
- */ - get(k, getOptions = {}) { - const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const value = this.#valList[index]; - const fetching = this.#isBackgroundFetch(value); - if (status) - this.#statusTTL(status, index); - if (this.#isStale(index)) { - if (status) - status.get = 'stale'; - // delete only if not an in-flight background fetch - if (!fetching) { - if (!noDeleteOnStaleGet) { - this.delete(k); - } - if (status && allowStale) - status.returnedStale = true; - return allowStale ? value : undefined; - } - else { - if (status && - allowStale && - value.__staleWhileFetching !== undefined) { - status.returnedStale = true; - } - return allowStale ? value.__staleWhileFetching : undefined; - } - } - else { - if (status) - status.get = 'hit'; - // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching. - // If it's not stale, and fetching, AND has a __staleWhileFetching - // value, then that means the user fetched with {forceRefresh:true}, - // so it's safe to return that value. - if (fetching) { - return value.__staleWhileFetching; - } - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - return value; - } - } - else if (status) { - status.get = 'miss'; - } - } - #connect(p, n) { - this.#prev[n] = p; - this.#next[p] = n; - } - #moveToTail(index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.#tail) { - if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#connect(this.#prev[index], this.#next[index]); - } - this.#connect(this.#tail, index); - this.#tail = index; - } - } - /** - * Deletes a key out of the cache. - * Returns true if the key was deleted, false otherwise. - */ - delete(k) { - let deleted = false; - if (this.#size !== 0) { - const index = this.#keyMap.get(k); - if (index !== undefined) { - deleted = true; - if (this.#size === 1) { - this.clear(); - } - else { - this.#removeItemSize(index); - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'delete'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'delete']); - } - } - this.#keyMap.delete(k); - this.#keyList[index] = undefined; - this.#valList[index] = undefined; - if (index === this.#tail) { - this.#tail = this.#prev[index]; - } - else if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#next[this.#prev[index]] = this.#next[index]; - this.#prev[this.#next[index]] = this.#prev[index]; - } - this.#size--; - this.#free.push(index); - } - } - } - if (this.#hasDisposeAfter && this.#disposed?.length) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return deleted; - } - /** - * Clear the cache entirely, throwing away all values. 
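delete() above and clear(), whose body follows, both route removed values through the dispose hooks. A brief sketch under the same assumptions as before; the dispose callback and the stored handle are invented for illustration:

const { LRUCache } = require('lru-cache')

const handles = new LRUCache({
  max: 10,
  // called with (value, key, reason) whenever an entry is removed ('delete', 'evict', 'set')
  dispose: (value, key, reason) => value.close?.(),
})

handles.set('db', { close() { /* release the hypothetical resource */ } })
handles.delete('db') // returns true if the key existed; dispose runs with reason 'delete'
handles.clear()      // removes every remaining entry, disposing each one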
- */ - clear() { - for (const index of this.#rindexes({ allowStale: true })) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else { - const k = this.#keyList[index]; - if (this.#hasDispose) { - this.#dispose?.(v, k, 'delete'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'delete']); - } - } - } - this.#keyMap.clear(); - this.#valList.fill(undefined); - this.#keyList.fill(undefined); - if (this.#ttls && this.#starts) { - this.#ttls.fill(0); - this.#starts.fill(0); - } - if (this.#sizes) { - this.#sizes.fill(0); - } - this.#head = 0; - this.#tail = 0; - this.#free.length = 0; - this.#calculatedSize = 0; - this.#size = 0; - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } -} -exports.LRUCache = LRUCache; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js b/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js deleted file mode 100644 index 8d34a03041d25..0000000000000 --- a/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/index.min.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict";var x=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var j=(o,t,e)=>(x(o,t,"read from private field"),e?e.call(o):t.get(o)),I=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(x(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,N=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,M=globalThis.AbortSignal;if(typeof W>"u"){M=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new M;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!N.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!j(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=T;E=new WeakMap,I(R,E,!1);var C=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=R.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#I()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(N.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,C))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#U=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#U=()=>{};#u=()=>!1;#I(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#R=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#W(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#R=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#x(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#x(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#W(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#R(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#R(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#U(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#W(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#W(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),U=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",U&&O&&(l.returnedStale=!0)),U?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};exports.LRUCache=C; -//# sourceMappingURL=index.min.js.map diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/package.json b/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/package.json deleted file mode 100644 index 5bbefffbabee3..0000000000000 --- a/node_modules/path-scurry/node_modules/lru-cache/dist/cjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "commonjs" -} diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.js b/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.js deleted file mode 100644 index 23b9754ad6c76..0000000000000 --- a/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.js +++ /dev/null @@ -1,1400 +0,0 @@ -/** - * @module LRUCache - */ -const perf = typeof performance === 'object' && - performance && - typeof performance.now === 'function' - ? performance - : Date; -const warned = new Set(); -/* c8 ignore start */ -const PROCESS = (typeof process === 'object' && !!process ? process : {}); -/* c8 ignore start */ -const emitWarning = (msg, type, code, fn) => { - typeof PROCESS.emitWarning === 'function' - ? PROCESS.emitWarning(msg, type, code, fn) - : console.error(`[${code}] ${type}: ${msg}`); -}; -let AC = globalThis.AbortController; -let AS = globalThis.AbortSignal; -/* c8 ignore start */ -if (typeof AC === 'undefined') { - //@ts-ignore - AS = class AbortSignal { - onabort; - _onabort = []; - reason; - aborted = false; - addEventListener(_, fn) { - this._onabort.push(fn); - } - }; - //@ts-ignore - AC = class AbortController { - constructor() { - warnACPolyfill(); - } - signal = new AS(); - abort(reason) { - if (this.signal.aborted) - return; - //@ts-ignore - this.signal.reason = reason; - //@ts-ignore - this.signal.aborted = true; - //@ts-ignore - for (const fn of this.signal._onabort) { - fn(reason); - } - this.signal.onabort?.(reason); - } - }; - let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; - const warnACPolyfill = () => { - if (!printACPolyfillWarning) - return; - printACPolyfillWarning = false; - emitWarning('AbortController is not defined. If using lru-cache in ' + - 'node 14, load an AbortController polyfill from the ' + - '`node-abort-controller` package. A minimal polyfill is ' + - 'provided for use by LRUCache.fetch(), but it should not be ' + - 'relied upon in other contexts (eg, passing it to other APIs that ' + - 'use AbortController/AbortSignal might have undesirable effects). 
' + - 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); - }; -} -/* c8 ignore stop */ -const shouldWarn = (code) => !warned.has(code); -const TYPE = Symbol('type'); -const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); -/* c8 ignore start */ -// This is a little bit ridiculous, tbh. -// The maximum array length is 2^32-1 or thereabouts on most JS impls. -// And well before that point, you're caching the entire world, I mean, -// that's ~32GB of just integers for the next/prev links, plus whatever -// else to hold that many keys and values. Just filling the memory with -// zeroes at init time is brutal when you get that big. -// But why not be complete? -// Maybe in the future, these limits will have expanded. -const getUintArray = (max) => !isPosInt(max) - ? null - : max <= Math.pow(2, 8) - ? Uint8Array - : max <= Math.pow(2, 16) - ? Uint16Array - : max <= Math.pow(2, 32) - ? Uint32Array - : max <= Number.MAX_SAFE_INTEGER - ? ZeroArray - : null; -/* c8 ignore stop */ -class ZeroArray extends Array { - constructor(size) { - super(size); - this.fill(0); - } -} -class Stack { - heap; - length; - // private constructor - static #constructing = false; - static create(max) { - const HeapCls = getUintArray(max); - if (!HeapCls) - return []; - Stack.#constructing = true; - const s = new Stack(max, HeapCls); - Stack.#constructing = false; - return s; - } - constructor(max, HeapCls) { - /* c8 ignore start */ - if (!Stack.#constructing) { - throw new TypeError('instantiate Stack using Stack.create(n)'); - } - /* c8 ignore stop */ - this.heap = new HeapCls(max); - this.length = 0; - } - push(n) { - this.heap[this.length++] = n; - } - pop() { - return this.heap[--this.length]; - } -} -/** - * Default export, the thing you're using this module to get. - * - * All properties from the options object (with the exception of - * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as - * normal public members. (`max` and `maxBase` are read-only getters.) - * Changing any of these will alter the defaults for subsequent method calls, - * but is otherwise safe. - */ -export class LRUCache { - // properties coming in from the options of these, only max and maxSize - // really *need* to be protected. The rest can be modified, as they just - // set defaults for various methods. 
- #max; - #maxSize; - #dispose; - #disposeAfter; - #fetchMethod; - /** - * {@link LRUCache.OptionsBase.ttl} - */ - ttl; - /** - * {@link LRUCache.OptionsBase.ttlResolution} - */ - ttlResolution; - /** - * {@link LRUCache.OptionsBase.ttlAutopurge} - */ - ttlAutopurge; - /** - * {@link LRUCache.OptionsBase.updateAgeOnGet} - */ - updateAgeOnGet; - /** - * {@link LRUCache.OptionsBase.updateAgeOnHas} - */ - updateAgeOnHas; - /** - * {@link LRUCache.OptionsBase.allowStale} - */ - allowStale; - /** - * {@link LRUCache.OptionsBase.noDisposeOnSet} - */ - noDisposeOnSet; - /** - * {@link LRUCache.OptionsBase.noUpdateTTL} - */ - noUpdateTTL; - /** - * {@link LRUCache.OptionsBase.maxEntrySize} - */ - maxEntrySize; - /** - * {@link LRUCache.OptionsBase.sizeCalculation} - */ - sizeCalculation; - /** - * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} - */ - noDeleteOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} - */ - noDeleteOnStaleGet; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} - */ - allowStaleOnFetchAbort; - /** - * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} - */ - allowStaleOnFetchRejection; - /** - * {@link LRUCache.OptionsBase.ignoreFetchAbort} - */ - ignoreFetchAbort; - // computed properties - #size; - #calculatedSize; - #keyMap; - #keyList; - #valList; - #next; - #prev; - #head; - #tail; - #free; - #disposed; - #sizes; - #starts; - #ttls; - #hasDispose; - #hasFetchMethod; - #hasDisposeAfter; - /** - * Do not call this method unless you need to inspect the - * inner workings of the cache. If anything returned by this - * object is modified in any way, strange breakage may occur. - * - * These fields are private for a reason! - * - * @internal - */ - static unsafeExposeInternals(c) { - return { - // properties - starts: c.#starts, - ttls: c.#ttls, - sizes: c.#sizes, - keyMap: c.#keyMap, - keyList: c.#keyList, - valList: c.#valList, - next: c.#next, - prev: c.#prev, - get head() { - return c.#head; - }, - get tail() { - return c.#tail; - }, - free: c.#free, - // methods - isBackgroundFetch: (p) => c.#isBackgroundFetch(p), - backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), - moveToTail: (index) => c.#moveToTail(index), - indexes: (options) => c.#indexes(options), - rindexes: (options) => c.#rindexes(options), - isStale: (index) => c.#isStale(index), - }; - } - // Protected read-only members - /** - * {@link LRUCache.OptionsBase.max} (read-only) - */ - get max() { - return this.#max; - } - /** - * {@link LRUCache.OptionsBase.maxSize} (read-only) - */ - get maxSize() { - return this.#maxSize; - } - /** - * The total computed size of items in the cache (read-only) - */ - get calculatedSize() { - return this.#calculatedSize; - } - /** - * The number of items stored in the cache (read-only) - */ - get size() { - return this.#size; - } - /** - * {@link LRUCache.OptionsBase.fetchMethod} (read-only) - */ - get fetchMethod() { - return this.#fetchMethod; - } - /** - * {@link LRUCache.OptionsBase.dispose} (read-only) - */ - get dispose() { - return this.#dispose; - } - /** - * {@link LRUCache.OptionsBase.disposeAfter} (read-only) - */ - get disposeAfter() { - return this.#disposeAfter; - } - constructor(options) { - const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, 
allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options; - if (max !== 0 && !isPosInt(max)) { - throw new TypeError('max option must be a nonnegative integer'); - } - const UintArray = max ? getUintArray(max) : Array; - if (!UintArray) { - throw new Error('invalid max value: ' + max); - } - this.#max = max; - this.#maxSize = maxSize; - this.maxEntrySize = maxEntrySize || this.#maxSize; - this.sizeCalculation = sizeCalculation; - if (this.sizeCalculation) { - if (!this.#maxSize && !this.maxEntrySize) { - throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation set to non-function'); - } - } - if (fetchMethod !== undefined && - typeof fetchMethod !== 'function') { - throw new TypeError('fetchMethod must be a function if specified'); - } - this.#fetchMethod = fetchMethod; - this.#hasFetchMethod = !!fetchMethod; - this.#keyMap = new Map(); - this.#keyList = new Array(max).fill(undefined); - this.#valList = new Array(max).fill(undefined); - this.#next = new UintArray(max); - this.#prev = new UintArray(max); - this.#head = 0; - this.#tail = 0; - this.#free = Stack.create(max); - this.#size = 0; - this.#calculatedSize = 0; - if (typeof dispose === 'function') { - this.#dispose = dispose; - } - if (typeof disposeAfter === 'function') { - this.#disposeAfter = disposeAfter; - this.#disposed = []; - } - else { - this.#disposeAfter = undefined; - this.#disposed = undefined; - } - this.#hasDispose = !!this.#dispose; - this.#hasDisposeAfter = !!this.#disposeAfter; - this.noDisposeOnSet = !!noDisposeOnSet; - this.noUpdateTTL = !!noUpdateTTL; - this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; - this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; - this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; - this.ignoreFetchAbort = !!ignoreFetchAbort; - // NB: maxEntrySize is set to maxSize if it's set - if (this.maxEntrySize !== 0) { - if (this.#maxSize !== 0) { - if (!isPosInt(this.#maxSize)) { - throw new TypeError('maxSize must be a positive integer if specified'); - } - } - if (!isPosInt(this.maxEntrySize)) { - throw new TypeError('maxEntrySize must be a positive integer if specified'); - } - this.#initializeSizeTracking(); - } - this.allowStale = !!allowStale; - this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; - this.updateAgeOnGet = !!updateAgeOnGet; - this.updateAgeOnHas = !!updateAgeOnHas; - this.ttlResolution = - isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution - : 1; - this.ttlAutopurge = !!ttlAutopurge; - this.ttl = ttl || 0; - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError('ttl must be a positive integer if specified'); - } - this.#initializeTTLTracking(); - } - // do not allow completely unbounded caches - if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { - throw new TypeError('At least one of max, maxSize, or ttl is required'); - } - if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { - const code = 'LRU_CACHE_UNBOUNDED'; - if (shouldWarn(code)) { - warned.add(code); - const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + - 'result in unbounded memory consumption.'; - emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); - } - } - } - /** - * Return the remaining TTL time for a given entry key - */ - getRemainingTTL(key) { - return this.#keyMap.has(key) ? 
Infinity : 0; - } - #initializeTTLTracking() { - const ttls = new ZeroArray(this.#max); - const starts = new ZeroArray(this.#max); - this.#ttls = ttls; - this.#starts = starts; - this.#setItemTTL = (index, ttl, start = perf.now()) => { - starts[index] = ttl !== 0 ? start : 0; - ttls[index] = ttl; - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.#isStale(index)) { - this.delete(this.#keyList[index]); - } - }, ttl + 1); - // unref() not supported on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - }; - this.#updateItemAge = index => { - starts[index] = ttls[index] !== 0 ? perf.now() : 0; - }; - this.#statusTTL = (status, index) => { - if (ttls[index]) { - const ttl = ttls[index]; - const start = starts[index]; - status.ttl = ttl; - status.start = start; - status.now = cachedNow || getNow(); - const age = status.now - start; - status.remainingTTL = ttl - age; - } - }; - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0; - const getNow = () => { - const n = perf.now(); - if (this.ttlResolution > 0) { - cachedNow = n; - const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); - // not available on all platforms - /* c8 ignore start */ - if (t.unref) { - t.unref(); - } - /* c8 ignore stop */ - } - return n; - }; - this.getRemainingTTL = key => { - const index = this.#keyMap.get(key); - if (index === undefined) { - return 0; - } - const ttl = ttls[index]; - const start = starts[index]; - if (ttl === 0 || start === 0) { - return Infinity; - } - const age = (cachedNow || getNow()) - start; - return ttl - age; - }; - this.#isStale = index => { - return (ttls[index] !== 0 && - starts[index] !== 0 && - (cachedNow || getNow()) - starts[index] > ttls[index]); - }; - } - // conditionally set private methods related to TTL - #updateItemAge = () => { }; - #statusTTL = () => { }; - #setItemTTL = () => { }; - /* c8 ignore stop */ - #isStale = () => false; - #initializeSizeTracking() { - const sizes = new ZeroArray(this.#max); - this.#calculatedSize = 0; - this.#sizes = sizes; - this.#removeItemSize = index => { - this.#calculatedSize -= sizes[index]; - sizes[index] = 0; - }; - this.#requireSize = (k, v, size, sizeCalculation) => { - // provisionally accept background fetches. - // actual value size will be checked when they return. - if (this.#isBackgroundFetch(v)) { - return 0; - } - if (!isPosInt(size)) { - if (sizeCalculation) { - if (typeof sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation must be a function'); - } - size = sizeCalculation(v, k); - if (!isPosInt(size)) { - throw new TypeError('sizeCalculation return invalid (expect positive integer)'); - } - } - else { - throw new TypeError('invalid size value (must be positive integer). 
' + - 'When maxSize or maxEntrySize is used, sizeCalculation ' + - 'or size must be set.'); - } - } - return size; - }; - this.#addItemSize = (index, size, status) => { - sizes[index] = size; - if (this.#maxSize) { - const maxSize = this.#maxSize - sizes[index]; - while (this.#calculatedSize > maxSize) { - this.#evict(true); - } - } - this.#calculatedSize += sizes[index]; - if (status) { - status.entrySize = size; - status.totalCalculatedSize = this.#calculatedSize; - } - }; - } - #removeItemSize = _i => { }; - #addItemSize = (_i, _s, _st) => { }; - #requireSize = (_k, _v, size, sizeCalculation) => { - if (size || sizeCalculation) { - throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); - } - return 0; - }; - *#indexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#tail; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#head) { - break; - } - else { - i = this.#prev[i]; - } - } - } - } - *#rindexes({ allowStale = this.allowStale } = {}) { - if (this.#size) { - for (let i = this.#head; true;) { - if (!this.#isValidIndex(i)) { - break; - } - if (allowStale || !this.#isStale(i)) { - yield i; - } - if (i === this.#tail) { - break; - } - else { - i = this.#next[i]; - } - } - } - } - #isValidIndex(index) { - return (index !== undefined && - this.#keyMap.get(this.#keyList[index]) === index); - } - /** - * Return a generator yielding `[key, value]` pairs, - * in order from most recently used to least recently used. - */ - *entries() { - for (const i of this.#indexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Inverse order version of {@link LRUCache.entries} - * - * Return a generator yielding `[key, value]` pairs, - * in order from least recently used to most recently used. - */ - *rentries() { - for (const i of this.#rindexes()) { - if (this.#valList[i] !== undefined && - this.#keyList[i] !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield [this.#keyList[i], this.#valList[i]]; - } - } - } - /** - * Return a generator yielding the keys in the cache, - * in order from most recently used to least recently used. - */ - *keys() { - for (const i of this.#indexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Inverse order version of {@link LRUCache.keys} - * - * Return a generator yielding the keys in the cache, - * in order from least recently used to most recently used. - */ - *rkeys() { - for (const i of this.#rindexes()) { - const k = this.#keyList[i]; - if (k !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield k; - } - } - } - /** - * Return a generator yielding the values in the cache, - * in order from most recently used to least recently used. - */ - *values() { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Inverse order version of {@link LRUCache.values} - * - * Return a generator yielding the values in the cache, - * in order from least recently used to most recently used. 
- */ - *rvalues() { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - if (v !== undefined && - !this.#isBackgroundFetch(this.#valList[i])) { - yield this.#valList[i]; - } - } - } - /** - * Iterating over the cache itself yields the same results as - * {@link LRUCache.entries} - */ - [Symbol.iterator]() { - return this.entries(); - } - /** - * Find a value for which the supplied fn method returns a truthy value, - * similar to Array.find(). fn is called as fn(value, key, cache). - */ - find(fn, getOptions = {}) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - if (fn(value, this.#keyList[i], this)) { - return this.get(this.#keyList[i], getOptions); - } - } - } - /** - * Call the supplied function on each item in the cache, in order from - * most recently used to least recently used. fn is called as - * fn(value, key, cache). Does not update age or recenty of use. - * Does not iterate over stale values. - */ - forEach(fn, thisp = this) { - for (const i of this.#indexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * The same as {@link LRUCache.forEach} but items are iterated over in - * reverse order. (ie, less recently used items are iterated over first.) - */ - rforEach(fn, thisp = this) { - for (const i of this.#rindexes()) { - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined) - continue; - fn.call(thisp, value, this.#keyList[i], this); - } - } - /** - * Delete any stale entries. Returns true if anything was removed, - * false otherwise. - */ - purgeStale() { - let deleted = false; - for (const i of this.#rindexes({ allowStale: true })) { - if (this.#isStale(i)) { - this.delete(this.#keyList[i]); - deleted = true; - } - } - return deleted; - } - /** - * Return an array of [key, {@link LRUCache.Entry}] tuples which can be - * passed to cache.load() - */ - dump() { - const arr = []; - for (const i of this.#indexes({ allowStale: true })) { - const key = this.#keyList[i]; - const v = this.#valList[i]; - const value = this.#isBackgroundFetch(v) - ? v.__staleWhileFetching - : v; - if (value === undefined || key === undefined) - continue; - const entry = { value }; - if (this.#ttls && this.#starts) { - entry.ttl = this.#ttls[i]; - // always dump the start relative to a portable timestamp - // it's ok for this to be a bit slow, it's a rare operation. - const age = perf.now() - this.#starts[i]; - entry.start = Math.floor(Date.now() - age); - } - if (this.#sizes) { - entry.size = this.#sizes[i]; - } - arr.unshift([key, entry]); - } - return arr; - } - /** - * Reset the cache and load in the items in entries in the order listed. - * Note that the shape of the resulting cache may be different if the - * same options are not used in both caches. - */ - load(arr) { - this.clear(); - for (const [key, entry] of arr) { - if (entry.start) { - // entry.start is a portable timestamp, but we may be using - // node's performance.now(), so calculate the offset, so that - // we get the intended remaining TTL, no matter how long it's - // been on ice. - // - // it's ok for this to be a bit slow, it's a rare operation. 
- const age = Date.now() - entry.start; - entry.start = perf.now() - age; - } - this.set(key, entry.value, entry); - } - } - /** - * Add a value to the cache. - * - * Note: if `undefined` is specified as a value, this is an alias for - * {@link LRUCache#delete} - */ - set(k, v, setOptions = {}) { - if (v === undefined) { - this.delete(k); - return this; - } - const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; - let { noUpdateTTL = this.noUpdateTTL } = setOptions; - const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); - // if the item doesn't fit, don't do anything - // NB: maxEntrySize set to maxSize by default - if (this.maxEntrySize && size > this.maxEntrySize) { - if (status) { - status.set = 'miss'; - status.maxEntrySizeExceeded = true; - } - // have to delete, in case something is there already. - this.delete(k); - return this; - } - let index = this.#size === 0 ? undefined : this.#keyMap.get(k); - if (index === undefined) { - // addition - index = (this.#size === 0 - ? this.#tail - : this.#free.length !== 0 - ? this.#free.pop() - : this.#size === this.#max - ? this.#evict(false) - : this.#size); - this.#keyList[index] = k; - this.#valList[index] = v; - this.#keyMap.set(k, index); - this.#next[this.#tail] = index; - this.#prev[index] = this.#tail; - this.#tail = index; - this.#size++; - this.#addItemSize(index, size, status); - if (status) - status.set = 'add'; - noUpdateTTL = false; - } - else { - // update - this.#moveToTail(index); - const oldVal = this.#valList[index]; - if (v !== oldVal) { - if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { - oldVal.__abortController.abort(new Error('replaced')); - const { __staleWhileFetching: s } = oldVal; - if (s !== undefined && !noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(s, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([s, k, 'set']); - } - } - } - else if (!noDisposeOnSet) { - if (this.#hasDispose) { - this.#dispose?.(oldVal, k, 'set'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([oldVal, k, 'set']); - } - } - this.#removeItemSize(index); - this.#addItemSize(index, size, status); - this.#valList[index] = v; - if (status) { - status.set = 'replace'; - const oldValue = oldVal && this.#isBackgroundFetch(oldVal) - ? oldVal.__staleWhileFetching - : oldVal; - if (oldValue !== undefined) - status.oldValue = oldValue; - } - } - else if (status) { - status.set = 'update'; - } - } - if (ttl !== 0 && !this.#ttls) { - this.#initializeTTLTracking(); - } - if (this.#ttls) { - if (!noUpdateTTL) { - this.#setItemTTL(index, ttl, start); - } - if (status) - this.#statusTTL(status, index); - } - if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return this; - } - /** - * Evict the least recently used item, returning its value or - * `undefined` if cache is empty. 
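set(), dump(), and load(), removed just above, round-trip a cache through a plain array of [key, entry] tuples. A small sketch based on that deleted source; the keys, values, and size limits are invented:

const { LRUCache } = require('lru-cache')

const src = new LRUCache({
  maxSize: 1024,
  sizeCalculation: (value) => value.length, // used when set() is not given an explicit size
})
src.set('greeting', 'hello world', { ttl: 60 * 1000 }) // per-entry ttl works even without a default ttl

const snapshot = src.dump() // [key, entry] tuples; each entry carries value plus ttl/start/size when tracked

const dst = new LRUCache({ maxSize: 1024, sizeCalculation: (value) => value.length })
dst.load(snapshot) // clears dst, then replays the entries with their remaining TTL preserved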
- */ - pop() { - try { - while (this.#size) { - const val = this.#valList[this.#head]; - this.#evict(true); - if (this.#isBackgroundFetch(val)) { - if (val.__staleWhileFetching) { - return val.__staleWhileFetching; - } - } - else if (val !== undefined) { - return val; - } - } - } - finally { - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } - } - #evict(free) { - const head = this.#head; - const k = this.#keyList[head]; - const v = this.#valList[head]; - if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('evicted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'evict'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'evict']); - } - } - this.#removeItemSize(head); - // if we aren't about to use the index, then null these out - if (free) { - this.#keyList[head] = undefined; - this.#valList[head] = undefined; - this.#free.push(head); - } - if (this.#size === 1) { - this.#head = this.#tail = 0; - this.#free.length = 0; - } - else { - this.#head = this.#next[head]; - } - this.#keyMap.delete(k); - this.#size--; - return head; - } - /** - * Check if a key is in the cache, without updating the recency of use. - * Will return false if the item is stale, even though it is technically - * in the cache. - * - * Will not update item age unless - * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. - */ - has(k, hasOptions = {}) { - const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v) && - v.__staleWhileFetching === undefined) { - return false; - } - if (!this.#isStale(index)) { - if (updateAgeOnHas) { - this.#updateItemAge(index); - } - if (status) { - status.has = 'hit'; - this.#statusTTL(status, index); - } - return true; - } - else if (status) { - status.has = 'stale'; - this.#statusTTL(status, index); - } - } - else if (status) { - status.has = 'miss'; - } - return false; - } - /** - * Like {@link LRUCache#get} but doesn't update recency or delete stale - * items. - * - * Returns `undefined` if the item is stale, unless - * {@link LRUCache.OptionsBase.allowStale} is set. - */ - peek(k, peekOptions = {}) { - const { allowStale = this.allowStale } = peekOptions; - const index = this.#keyMap.get(k); - if (index !== undefined && - (allowStale || !this.#isStale(index))) { - const v = this.#valList[index]; - // either stale and allowed, or forcing a refresh of non-stale value - return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; - } - } - #backgroundFetch(k, index, options, context) { - const v = index === undefined ? undefined : this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - return v; - } - const ac = new AC(); - const { signal } = options; - // when/if our AC signals, then stop listening to theirs. 
- signal?.addEventListener('abort', () => ac.abort(signal.reason), { - signal: ac.signal, - }); - const fetchOpts = { - signal: ac.signal, - options, - context, - }; - const cb = (v, updateCache = false) => { - const { aborted } = ac.signal; - const ignoreAbort = options.ignoreFetchAbort && v !== undefined; - if (options.status) { - if (aborted && !updateCache) { - options.status.fetchAborted = true; - options.status.fetchError = ac.signal.reason; - if (ignoreAbort) - options.status.fetchAbortIgnored = true; - } - else { - options.status.fetchResolved = true; - } - } - if (aborted && !ignoreAbort && !updateCache) { - return fetchFail(ac.signal.reason); - } - // either we didn't abort, and are still here, or we did, and ignored - const bf = p; - if (this.#valList[index] === p) { - if (v === undefined) { - if (bf.__staleWhileFetching) { - this.#valList[index] = bf.__staleWhileFetching; - } - else { - this.delete(k); - } - } - else { - if (options.status) - options.status.fetchUpdated = true; - this.set(k, v, fetchOpts.options); - } - } - return v; - }; - const eb = (er) => { - if (options.status) { - options.status.fetchRejected = true; - options.status.fetchError = er; - } - return fetchFail(er); - }; - const fetchFail = (er) => { - const { aborted } = ac.signal; - const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; - const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; - const noDelete = allowStale || options.noDeleteOnFetchRejection; - const bf = p; - if (this.#valList[index] === p) { - // if we allow stale on fetch rejections, then we need to ensure that - // the stale value is not removed from the cache when the fetch fails. - const del = !noDelete || bf.__staleWhileFetching === undefined; - if (del) { - this.delete(k); - } - else if (!allowStaleAborted) { - // still replace the *promise* with the stale value, - // since we are done with the promise at this point. - // leave it untouched if we're still waiting for an - // aborted background fetch that hasn't yet returned. - this.#valList[index] = bf.__staleWhileFetching; - } - } - if (allowStale) { - if (options.status && bf.__staleWhileFetching !== undefined) { - options.status.returnedStale = true; - } - return bf.__staleWhileFetching; - } - else if (bf.__returned === bf) { - throw er; - } - }; - const pcall = (res, rej) => { - const fmp = this.#fetchMethod?.(k, v, fetchOpts); - if (fmp && fmp instanceof Promise) { - fmp.then(v => res(v === undefined ? undefined : v), rej); - } - // ignored, we go until we finish, regardless. - // defer check until we are actually aborting, - // so fetchMethod can override. - ac.signal.addEventListener('abort', () => { - if (!options.ignoreFetchAbort || - options.allowStaleOnFetchAbort) { - res(undefined); - // when it eventually resolves, update the cache. - if (options.allowStaleOnFetchAbort) { - res = v => cb(v, true); - } - } - }); - }; - if (options.status) - options.status.fetchDispatched = true; - const p = new Promise(pcall).then(cb, eb); - const bf = Object.assign(p, { - __abortController: ac, - __staleWhileFetching: v, - __returned: undefined, - }); - if (index === undefined) { - // internal, don't expose status. 
- this.set(k, bf, { ...fetchOpts.options, status: undefined }); - index = this.#keyMap.get(k); - } - else { - this.#valList[index] = bf; - } - return bf; - } - #isBackgroundFetch(p) { - if (!this.#hasFetchMethod) - return false; - const b = p; - return (!!b && - b instanceof Promise && - b.hasOwnProperty('__staleWhileFetching') && - b.__abortController instanceof AC); - } - async fetch(k, fetchOptions = {}) { - const { - // get options - allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, - // set options - ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, - // fetch exclusive options - noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; - if (!this.#hasFetchMethod) { - if (status) - status.fetch = 'get'; - return this.get(k, { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - status, - }); - } - const options = { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - ttl, - noDisposeOnSet, - size, - sizeCalculation, - noUpdateTTL, - noDeleteOnFetchRejection, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - status, - signal, - }; - let index = this.#keyMap.get(k); - if (index === undefined) { - if (status) - status.fetch = 'miss'; - const p = this.#backgroundFetch(k, index, options, context); - return (p.__returned = p); - } - else { - // in cache, maybe already fetching - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - const stale = allowStale && v.__staleWhileFetching !== undefined; - if (status) { - status.fetch = 'inflight'; - if (stale) - status.returnedStale = true; - } - return stale ? v.__staleWhileFetching : (v.__returned = v); - } - // if we force a refresh, that means do NOT serve the cached value, - // unless we are already in the process of refreshing the cache. - const isStale = this.#isStale(index); - if (!forceRefresh && !isStale) { - if (status) - status.fetch = 'hit'; - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - if (status) - this.#statusTTL(status, index); - return v; - } - // ok, it is stale or a forced refresh, and not already fetching. - // refresh the cache. - const p = this.#backgroundFetch(k, index, options, context); - const hasStale = p.__staleWhileFetching !== undefined; - const staleVal = hasStale && allowStale; - if (status) { - status.fetch = isStale ? 'stale' : 'refresh'; - if (staleVal && isStale) - status.returnedStale = true; - } - return staleVal ? p.__staleWhileFetching : (p.__returned = p); - } - } - /** - * Return a value from the cache. Will update the recency of the cache - * entry found. - * - * If the key is not found, get() will return `undefined`. 
- */ - get(k, getOptions = {}) { - const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; - const index = this.#keyMap.get(k); - if (index !== undefined) { - const value = this.#valList[index]; - const fetching = this.#isBackgroundFetch(value); - if (status) - this.#statusTTL(status, index); - if (this.#isStale(index)) { - if (status) - status.get = 'stale'; - // delete only if not an in-flight background fetch - if (!fetching) { - if (!noDeleteOnStaleGet) { - this.delete(k); - } - if (status && allowStale) - status.returnedStale = true; - return allowStale ? value : undefined; - } - else { - if (status && - allowStale && - value.__staleWhileFetching !== undefined) { - status.returnedStale = true; - } - return allowStale ? value.__staleWhileFetching : undefined; - } - } - else { - if (status) - status.get = 'hit'; - // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching. - // If it's not stale, and fetching, AND has a __staleWhileFetching - // value, then that means the user fetched with {forceRefresh:true}, - // so it's safe to return that value. - if (fetching) { - return value.__staleWhileFetching; - } - this.#moveToTail(index); - if (updateAgeOnGet) { - this.#updateItemAge(index); - } - return value; - } - } - else if (status) { - status.get = 'miss'; - } - } - #connect(p, n) { - this.#prev[n] = p; - this.#next[p] = n; - } - #moveToTail(index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.#tail) { - if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#connect(this.#prev[index], this.#next[index]); - } - this.#connect(this.#tail, index); - this.#tail = index; - } - } - /** - * Deletes a key out of the cache. - * Returns true if the key was deleted, false otherwise. - */ - delete(k) { - let deleted = false; - if (this.#size !== 0) { - const index = this.#keyMap.get(k); - if (index !== undefined) { - deleted = true; - if (this.#size === 1) { - this.clear(); - } - else { - this.#removeItemSize(index); - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else if (this.#hasDispose || this.#hasDisposeAfter) { - if (this.#hasDispose) { - this.#dispose?.(v, k, 'delete'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'delete']); - } - } - this.#keyMap.delete(k); - this.#keyList[index] = undefined; - this.#valList[index] = undefined; - if (index === this.#tail) { - this.#tail = this.#prev[index]; - } - else if (index === this.#head) { - this.#head = this.#next[index]; - } - else { - this.#next[this.#prev[index]] = this.#next[index]; - this.#prev[this.#next[index]] = this.#prev[index]; - } - this.#size--; - this.#free.push(index); - } - } - } - if (this.#hasDisposeAfter && this.#disposed?.length) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - return deleted; - } - /** - * Clear the cache entirely, throwing away all values. 
- */ - clear() { - for (const index of this.#rindexes({ allowStale: true })) { - const v = this.#valList[index]; - if (this.#isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')); - } - else { - const k = this.#keyList[index]; - if (this.#hasDispose) { - this.#dispose?.(v, k, 'delete'); - } - if (this.#hasDisposeAfter) { - this.#disposed?.push([v, k, 'delete']); - } - } - } - this.#keyMap.clear(); - this.#valList.fill(undefined); - this.#keyList.fill(undefined); - if (this.#ttls && this.#starts) { - this.#ttls.fill(0); - this.#starts.fill(0); - } - if (this.#sizes) { - this.#sizes.fill(0); - } - this.#head = 0; - this.#tail = 0; - this.#free.length = 0; - this.#calculatedSize = 0; - this.#size = 0; - if (this.#hasDisposeAfter && this.#disposed) { - const dt = this.#disposed; - let task; - while ((task = dt?.shift())) { - this.#disposeAfter?.(...task); - } - } - } -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js b/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js deleted file mode 100644 index 5a16b3940d6df..0000000000000 --- a/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/index.min.js +++ /dev/null @@ -1,2 +0,0 @@ -var U=(o,t,e)=>{if(!t.has(o))throw TypeError("Cannot "+e)};var I=(o,t,e)=>(U(o,t,"read from private field"),e?e.call(o):t.get(o)),j=(o,t,e)=>{if(t.has(o))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(o):t.set(o,e)},D=(o,t,e,i)=>(U(o,t,"write to private field"),i?i.call(o,e):t.set(o,e),e);var v=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,M=new Set,L=typeof process=="object"&&process?process:{},P=(o,t,e,i)=>{typeof L.emitWarning=="function"?L.emitWarning(o,t,e,i):console.error(`[${e}] ${t}: ${o}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let o=L.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{o&&(o=!1,P("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=o=>!M.has(o),Y=Symbol("type"),m=o=>o&&o===Math.floor(o)&&o>0&&isFinite(o),H=o=>m(o)?o<=Math.pow(2,8)?Uint8Array:o<=Math.pow(2,16)?Uint16Array:o<=Math.pow(2,32)?Uint32Array:o<=Number.MAX_SAFE_INTEGER?z:null:null,z=class extends Array{constructor(t){super(t),this.fill(0)}},E,T=class{heap;length;static create(t){let e=H(t);if(!e)return[];D(T,E,!0);let i=new T(t,e);return D(T,E,!1),i}constructor(t,e){if(!I(T,E))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},C=T;E=new WeakMap,j(C,E,!1);var R=class{#d;#f;#g;#p;#C;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#s;#w;#n;#i;#t;#l;#c;#o;#h;#S;#r;#m;#F;#_;#b;#T;#a;static unsafeExposeInternals(t){return{starts:t.#F,ttls:t.#_,sizes:t.#m,keyMap:t.#n,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#S,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#D(e,i,s,n),moveToTail:e=>t.#v(e),indexes:e=>t.#y(e),rindexes:e=>t.#A(e),isStale:e=>t.#u(e)}}get max(){return this.#d}get maxSize(){return this.#f}get calculatedSize(){return this.#w}get size(){return this.#s}get fetchMethod(){return this.#C}get dispose(){return this.#g}get disposeAfter(){return this.#p}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:a,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,noDeleteOnFetchRejection:l,noDeleteOnStaleGet:w,allowStaleOnFetchRejection:y,allowStaleOnFetchAbort:p,ignoreFetchAbort:_}=t;if(e!==0&&!m(e))throw new TypeError("max option must be a nonnegative integer");let O=e?H(e):Array;if(!O)throw new Error("invalid max value: "+e);if(this.#d=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#C=S,this.#T=!!S,this.#n=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new O(e),this.#c=new O(e),this.#o=0,this.#h=0,this.#S=C.create(e),this.#s=0,this.#w=0,typeof g=="function"&&(this.#g=g),typeof b=="function"?(this.#p=b,this.#r=[]):(this.#p=void 0,this.#r=void 0),this.#b=!!this.#g,this.#a=!!this.#p,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!l,this.allowStaleOnFetchRejection=!!y,this.allowStaleOnFetchAbort=!!p,this.ignoreFetchAbort=!!_,this.maxEntrySize!==0){if(this.#f!==0&&!m(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!m(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#j()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!w,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!a,this.ttlResolution=m(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!m(this.ttl))throw new TypeError("ttl must be a positive integer if 
specified");this.#L()}if(this.#d===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#d&&!this.#f){let A="LRU_CACHE_UNBOUNDED";V(A)&&(M.add(A),P("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",A,R))}}getRemainingTTL(t){return this.#n.has(t)?1/0:0}#L(){let t=new z(this.#d),e=new z(this.#d);this.#_=t,this.#F=e,this.#x=(n,h,a=v.now())=>{if(e[n]=h!==0?a:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#u(n)&&this.delete(this.#i[n])},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?v.now():0},this.#O=(n,h)=>{if(t[h]){let a=t[h],r=e[h];n.ttl=a,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=a-g}};let i=0,s=()=>{let n=v.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#n.get(n);if(h===void 0)return 0;let a=t[h],r=e[h];if(a===0||r===0)return 1/0;let g=(i||s())-r;return a-g},this.#u=n=>t[n]!==0&&e[n]!==0&&(i||s())-e[n]>t[n]}#z=()=>{};#O=()=>{};#x=()=>{};#u=()=>!1;#j(){let t=new z(this.#d);this.#w=0,this.#m=t,this.#E=e=>{this.#w-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!m(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!m(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#W=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#w>n;)this.#R(!0)}this.#w+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#w)}}#E=t=>{};#W=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#y({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#h;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#A({allowStale:t=this.allowStale}={}){if(this.#s)for(let e=this.#o;!(!this.#U(e)||((t||!this.#u(e))&&(yield e),e===this.#h));)e=this.#l[e]}#U(t){return t!==void 0&&this.#n.get(this.#i[t])===t}*entries(){for(let t of this.#y())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#y()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#y())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}find(t,e={}){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#y()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#A({allowStale:!0}))this.#u(e)&&(this.delete(this.#i[e]),t=!0);return t}dump(){let t=[];for(let e of this.#y({allowStale:!0})){let 
i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#_&&this.#F){h.ttl=this.#_[e];let a=v.now()-this.#F[e];h.start=Math.floor(Date.now()-a)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=v.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:a=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,a);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.delete(t),this;let f=this.#s===0?void 0:this.#n.get(t);if(f===void 0)f=this.#s===0?this.#h:this.#S.length!==0?this.#S.pop():this.#s===this.#d?this.#R(!1):this.#s,this.#i[f]=t,this.#t[f]=e,this.#n.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#s++,this.#W(f,b,r),r&&(r.set="add"),g=!1;else{this.#v(f);let u=this.#t[f];if(e!==u){if(this.#T&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#b&&this.#g?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#b&&this.#g?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#E(f),this.#W(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#_&&this.#L(),this.#_&&(g||this.#x(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#p?.(...c)}return this}pop(){try{for(;this.#s;){let t=this.#t[this.#o];if(this.#R(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}}#R(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#T&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#E(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#S.push(e)),this.#s===1?(this.#o=this.#h=0,this.#S.length=0):this.#o=this.#l[e],this.#n.delete(i),this.#s--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#n.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#u(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#n.get(t);if(s!==void 0&&(i||!this.#u(s))){let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}}#D(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:a}=i;a?.addEventListener("abort",()=>h.abort(a.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!S)return f(h.signal.reason);let y=c;return this.#t[e]===c&&(d===void 
0?y.__staleWhileFetching?this.#t[e]=y.__staleWhileFetching:this.delete(t):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,l=S&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,y=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!y||p.__staleWhileFetching===void 0?this.delete(t):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let l=this.#C?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#n.get(t)):this.#t[e]=F,F}#e(t){if(!this.#T)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:a=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:l,signal:w}=e;if(!this.#T)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let y={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:a,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:l,signal:w},p=this.#n.get(t);if(p===void 0){l&&(l.fetch="miss");let _=this.#D(t,p,y,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let G=i&&_.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",G&&(l.returnedStale=!0)),G?_.__staleWhileFetching:_.__returned=_}let O=this.#u(p);if(!S&&!O)return l&&(l.fetch="hit"),this.#v(p),s&&this.#z(p),l&&this.#O(l,p),_;let A=this.#D(t,p,y,d),x=A.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=O?"stale":"refresh",x&&O&&(l.returnedStale=!0)),x?A.__staleWhileFetching:A.__returned=A}}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,a=this.#n.get(t);if(a!==void 0){let r=this.#t[a],g=this.#e(r);return h&&this.#O(h,a),this.#u(a)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.delete(t),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#v(a),s&&this.#z(a),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#v(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){let e=!1;if(this.#s!==0){let i=this.#n.get(t);if(i!==void 0)if(e=!0,this.#s===1)this.clear();else{this.#E(i);let s=this.#t[i];this.#e(s)?s.__abortController.abort(new 
Error("deleted")):(this.#b||this.#a)&&(this.#b&&this.#g?.(s,t,"delete"),this.#a&&this.#r?.push([s,t,"delete"])),this.#n.delete(t),this.#i[i]=void 0,this.#t[i]=void 0,i===this.#h?this.#h=this.#c[i]:i===this.#o?this.#o=this.#l[i]:(this.#l[this.#c[i]]=this.#l[i],this.#c[this.#l[i]]=this.#c[i]),this.#s--,this.#S.push(i)}}if(this.#a&&this.#r?.length){let i=this.#r,s;for(;s=i?.shift();)this.#p?.(...s)}return e}clear(){for(let t of this.#A({allowStale:!0})){let e=this.#t[t];if(this.#e(e))e.__abortController.abort(new Error("deleted"));else{let i=this.#i[t];this.#b&&this.#g?.(e,i,"delete"),this.#a&&this.#r?.push([e,i,"delete"])}}if(this.#n.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#_&&this.#F&&(this.#_.fill(0),this.#F.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#S.length=0,this.#w=0,this.#s=0,this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#p?.(...e)}}};export{R as LRUCache}; -//# sourceMappingURL=index.min.js.map diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/package.json b/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/package.json deleted file mode 100644 index 3dbc1ca591c05..0000000000000 --- a/node_modules/path-scurry/node_modules/lru-cache/dist/mjs/package.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "type": "module" -} diff --git a/node_modules/path-scurry/node_modules/lru-cache/package.json b/node_modules/path-scurry/node_modules/lru-cache/package.json deleted file mode 100644 index bae4a04839d1f..0000000000000 --- a/node_modules/path-scurry/node_modules/lru-cache/package.json +++ /dev/null @@ -1,108 +0,0 @@ -{ - "name": "lru-cache", - "description": "A cache object that deletes the least-recently-used items.", - "version": "10.0.1", - "author": "Isaac Z. Schlueter ", - "keywords": [ - "mru", - "lru", - "cache" - ], - "sideEffects": false, - "scripts": { - "build": "npm run prepare", - "preprepare": "rm -rf dist", - "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", - "postprepare": "bash fixup.sh", - "pretest": "npm run prepare", - "presnap": "npm run prepare", - "test": "c8 tap", - "snap": "c8 tap", - "preversion": "npm test", - "postversion": "npm publish", - "prepublishOnly": "git push origin --follow-tags", - "format": "prettier --write .", - "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", - "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh", - "prebenchmark": "npm run prepare", - "benchmark": "make -C benchmark", - "preprofile": "npm run prepare", - "profile": "make -C benchmark profile" - }, - "main": "./dist/cjs/index.js", - "module": "./dist/mjs/index.js", - "exports": { - "./min": { - "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.min.js" - }, - "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.min.js" - } - }, - ".": { - "import": { - "types": "./dist/mjs/index.d.ts", - "default": "./dist/mjs/index.js" - }, - "require": { - "types": "./dist/cjs/index.d.ts", - "default": "./dist/cjs/index.js" - } - } - }, - "repository": "git://github.com/isaacs/node-lru-cache.git", - "devDependencies": { - "@size-limit/preset-small-lib": "^7.0.8", - "@types/node": "^20.2.5", - "@types/tap": "^15.0.6", - "benchmark": "^2.1.4", - "c8": "^7.11.2", - "clock-mock": "^1.0.6", - "esbuild": "^0.17.11", - "eslint-config-prettier": "^8.5.0", - "marked": "^4.2.12", - "mkdirp": "^2.1.5", - "prettier": "^2.6.2", - "size-limit": "^7.0.8", - "tap": "^16.3.4", - "ts-node": "^10.9.1", - "tslib": "^2.4.0", - "typedoc": "^0.24.6", - 
"typescript": "^5.0.4" - }, - "license": "ISC", - "files": [ - "dist" - ], - "engines": { - "node": "14 || >=16.14" - }, - "prettier": { - "semi": false, - "printWidth": 70, - "tabWidth": 2, - "useTabs": false, - "singleQuote": true, - "jsxSingleQuote": false, - "bracketSameLine": true, - "arrowParens": "avoid", - "endOfLine": "lf" - }, - "tap": { - "coverage": false, - "node-arg": [ - "--expose-gc", - "-r", - "ts-node/register" - ], - "ts": false - }, - "size-limit": [ - { - "path": "./dist/mjs/index.js" - } - ] -} diff --git a/node_modules/sigstore/node_modules/lru-cache/LICENSE b/node_modules/sigstore/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000000000..f785757cd63f8 --- /dev/null +++ b/node_modules/sigstore/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/sigstore/node_modules/lru-cache/index.js b/node_modules/sigstore/node_modules/lru-cache/index.js new file mode 100644 index 0000000000000..48e99fe5e5a70 --- /dev/null +++ b/node_modules/sigstore/node_modules/lru-cache/index.js @@ -0,0 +1,1227 @@ +const perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date + +const hasAbortController = typeof AbortController === 'function' + +// minimal backwards-compatibility polyfill +// this doesn't have nearly all the checks and whatnot that +// actual AbortController/Signal has, but it's enough for +// our purposes, and if used properly, behaves the same. +const AC = hasAbortController + ? AbortController + : class AbortController { + constructor() { + this.signal = new AS() + } + abort(reason = new Error('This operation was aborted')) { + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true + this.signal.dispatchEvent({ + type: 'abort', + target: this.signal, + }) + } + } + +const hasAbortSignal = typeof AbortSignal === 'function' +// Some polyfills put this on the AC class, not global +const hasACAbortSignal = typeof AC.AbortSignal === 'function' +const AS = hasAbortSignal + ? AbortSignal + : hasACAbortSignal + ? 
AC.AbortSignal + : class AbortSignal { + constructor() { + this.reason = undefined + this.aborted = false + this._listeners = [] + } + dispatchEvent(e) { + if (e.type === 'abort') { + this.aborted = true + this.onabort(e) + this._listeners.forEach(f => f(e), this) + } + } + onabort() {} + addEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners.push(fn) + } + } + removeEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners = this._listeners.filter(f => f !== fn) + } + } + } + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} + +const emitWarning = (...a) => { + typeof process === 'object' && + process && + typeof process.emitWarning === 'function' + ? process.emitWarning(...a) + : console.error(...a) +} + +const shouldWarn = code => !warned.has(code) + +const warn = (code, what, instead, fn) => { + warned.add(code) + const msg = `The ${what} is deprecated. Please use ${instead} instead.` + emitWarning(msg, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ?
{} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.max && !this.maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' 
+ emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + getRemainingTTL(key) { + return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0 + } + + initializeTTLTracking() { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + + this.setItemTTL = (index, ttl, start = perf.now()) => { + this.starts[index] = ttl !== 0 ? start : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + + this.updateItemAge = index => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout( + () => (cachedNow = 0), + this.ttlResolution + ) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + + this.getRemainingTTL = key => { + const index = this.keyMap.get(key) + if (index === undefined) { + return 0 + } + return this.ttls[index] === 0 || this.starts[index] === 0 + ? Infinity + : this.starts[index] + + this.ttls[index] - + (cachedNow || getNow()) + } + + this.isStale = index => { + return ( + this.ttls[index] !== 0 && + this.starts[index] !== 0 && + (cachedNow || getNow()) - this.starts[index] > + this.ttls[index] + ) + } + } + updateItemAge(_index) {} + statusTTL(_status, _index) {} + setItemTTL(_index, _ttl, _start) {} + isStale(_index) { + return false + } + + initializeSizeTracking() { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => { + this.calculatedSize -= this.sizes[index] + this.sizes[index] = 0 + } + this.requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + + 'must be set.' 
+ ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? 
oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + + get reset() { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length() { + deprecatedProperty('length', 'size') + return this.size + } + + static get AbortController() { + return AC + } + static get AbortSignal() { + return AS + } +} + +module.exports = LRUCache diff --git a/node_modules/sigstore/node_modules/lru-cache/index.mjs b/node_modules/sigstore/node_modules/lru-cache/index.mjs new file mode 100644 index 0000000000000..4a0b4813ec515 --- /dev/null +++ b/node_modules/sigstore/node_modules/lru-cache/index.mjs @@ -0,0 +1,1227 @@ +const perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date + +const hasAbortController = typeof AbortController === 'function' + +// minimal backwards-compatibility polyfill +// this doesn't have nearly all the checks and whatnot that +// actual AbortController/Signal has, but it's enough for +// our purposes, and if used properly, behaves the same. +const AC = hasAbortController + ? AbortController + : class AbortController { + constructor() { + this.signal = new AS() + } + abort(reason = new Error('This operation was aborted')) { + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true + this.signal.dispatchEvent({ + type: 'abort', + target: this.signal, + }) + } + } + +const hasAbortSignal = typeof AbortSignal === 'function' +// Some polyfills put this on the AC class, not global +const hasACAbortSignal = typeof AC.AbortSignal === 'function' +const AS = hasAbortSignal + ? AbortSignal + : hasACAbortSignal + ? AC.AbortSignal + : class AbortSignal { + constructor() { + this.reason = undefined + this.aborted = false + this._listeners = [] + } + dispatchEvent(e) { + if (e.type === 'abort') { + this.aborted = true + this.onabort(e) + this._listeners.forEach(f => f(e), this) + } + } + onabort() {} + addEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners.push(fn) + } + } + removeEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners = this._listeners.filter(f => f !== fn) + } + } + } + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} + +const emitWarning = (...a) => { + typeof process === 'object' && + process && + typeof process.emitWarning === 'function' + ? process.emitWarning(...a) + : console.error(...a) +} + +const shouldWarn = code => !warned.has(code) + +const warn = (code, what, instead, fn) => { + warned.add(code) + const msg = `The ${what} is deprecated.
Please use ${instead} instead.` + emitWarning(msg, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ? {} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? 
getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.max && !this.maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + getRemainingTTL(key) { + return this.has(key, { updateAgeOnHas: false }) ? 
Infinity : 0 + } + + initializeTTLTracking() { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + + this.setItemTTL = (index, ttl, start = perf.now()) => { + this.starts[index] = ttl !== 0 ? start : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + + this.updateItemAge = index => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout( + () => (cachedNow = 0), + this.ttlResolution + ) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + + this.getRemainingTTL = key => { + const index = this.keyMap.get(key) + if (index === undefined) { + return 0 + } + return this.ttls[index] === 0 || this.starts[index] === 0 + ? Infinity + : this.starts[index] + + this.ttls[index] - + (cachedNow || getNow()) + } + + this.isStale = index => { + return ( + this.ttls[index] !== 0 && + this.starts[index] !== 0 && + (cachedNow || getNow()) - this.starts[index] > + this.ttls[index] + ) + } + } + updateItemAge(_index) {} + statusTTL(_status, _index) {} + setItemTTL(_index, _ttl, _start) {} + isStale(_index) { + return false + } + + initializeSizeTracking() { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => { + this.calculatedSize -= this.sizes[index] + this.sizes[index] = 0 + } + this.requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + + 'must be set.' 
+ ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? 
oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + + get reset() { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length() { + deprecatedProperty('length', 'size') + return this.size + } + + static get AbortController() { + return AC + } + static get AbortSignal() { + return AS + } +} + +export default LRUCache diff --git a/node_modules/sigstore/node_modules/lru-cache/package.json b/node_modules/sigstore/node_modules/lru-cache/package.json new file mode 100644 index 0000000000000..9684991727e7a --- /dev/null +++ b/node_modules/sigstore/node_modules/lru-cache/package.json @@ -0,0 +1,96 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "7.18.3", + "author": "Isaac Z. Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "sideEffects": false, + "scripts": { + "build": "npm run prepare", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "prepare": "node ./scripts/transpile-to-esm.js", + "size": "size-limit", + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write .", + "typedoc": "typedoc ./index.d.ts" + }, + "type": "commonjs", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", + "exports": { + ".": { + "import": { + "types": "./index.d.ts", + "default": "./index.mjs" + }, + "require": { + "types": "./index.d.ts", + "default": "./index.js" + } + }, + "./package.json": "./package.json" + }, + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "@size-limit/preset-small-lib": "^7.0.8", + "@types/node": "^17.0.31", + "@types/tap": "^15.0.6", + "benchmark": "^2.1.4", + "c8": "^7.11.2", + "clock-mock": "^1.0.6", + "eslint-config-prettier": "^8.5.0", + "prettier": "^2.6.2", + "size-limit": "^7.0.8", + "tap": "^16.3.4", + "ts-node": "^10.7.0", + "tslib": "^2.4.0", + "typedoc": "^0.23.24", + "typescript": "^4.6.4" + }, + "license": "ISC", + "files": [ + "index.js", + "index.mjs", + "index.d.ts" + ], + "engines": { + "node": ">=12" + }, + "prettier": { + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "tap": { + "nyc-arg": [ + "--include=index.js" + ], + "node-arg": [ + "--expose-gc", + "--require", + "ts-node/register" + ], + "ts": false + }, + "size-limit": [ + { + "path": "./index.js" + } + ] +} diff --git a/node_modules/tuf-js/node_modules/lru-cache/LICENSE b/node_modules/tuf-js/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000000000..f785757cd63f8 --- /dev/null +++ b/node_modules/tuf-js/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/tuf-js/node_modules/lru-cache/index.js b/node_modules/tuf-js/node_modules/lru-cache/index.js new file mode 100644 index 0000000000000..48e99fe5e5a70 --- /dev/null +++ b/node_modules/tuf-js/node_modules/lru-cache/index.js @@ -0,0 +1,1227 @@ +const perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date + +const hasAbortController = typeof AbortController === 'function' + +// minimal backwards-compatibility polyfill +// this doesn't have nearly all the checks and whatnot that +// actual AbortController/Signal has, but it's enough for +// our purposes, and if used properly, behaves the same. +const AC = hasAbortController + ? AbortController + : class AbortController { + constructor() { + this.signal = new AS() + } + abort(reason = new Error('This operation was aborted')) { + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true + this.signal.dispatchEvent({ + type: 'abort', + target: this.signal, + }) + } + } + +const hasAbortSignal = typeof AbortSignal === 'function' +// Some polyfills put this on the AC class, not global +const hasACAbortSignal = typeof AC.AbortSignal === 'function' +const AS = hasAbortSignal + ? AbortSignal + : hasACAbortSignal + ? AC.AbortController + : class AbortSignal { + constructor() { + this.reason = undefined + this.aborted = false + this._listeners = [] + } + dispatchEvent(e) { + if (e.type === 'abort') { + this.aborted = true + this.onabort(e) + this._listeners.forEach(f => f(e), this) + } + } + onabort() {} + addEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners.push(fn) + } + } + removeEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners = this._listeners.filter(f => f !== fn) + } + } + } + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} + +const emitWarning = (...a) => { + typeof process === 'object' && + process && + typeof process.emitWarning === 'function' + ? process.emitWarning(...a) + : console.error(...a) +} + +const shouldWarn = code => !warned.has(code) + +const warn = (code, what, instead, fn) => { + warned.add(code) + const msg = `The ${what} is deprecated. Please use ${instead} instead.` + emitWarning(msg, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. 
+ * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ? {} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if 
(this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.max && !this.maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + getRemainingTTL(key) { + return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0 + } + + initializeTTLTracking() { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + + this.setItemTTL = (index, ttl, start = perf.now()) => { + this.starts[index] = ttl !== 0 ? start : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + + this.updateItemAge = index => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout( + () => (cachedNow = 0), + this.ttlResolution + ) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + + this.getRemainingTTL = key => { + const index = this.keyMap.get(key) + if (index === undefined) { + return 0 + } + return this.ttls[index] === 0 || this.starts[index] === 0 + ? 
Infinity + : this.starts[index] + + this.ttls[index] - + (cachedNow || getNow()) + } + + this.isStale = index => { + return ( + this.ttls[index] !== 0 && + this.starts[index] !== 0 && + (cachedNow || getNow()) - this.starts[index] > + this.ttls[index] + ) + } + } + updateItemAge(_index) {} + statusTTL(_status, _index) {} + setItemTTL(_index, _ttl, _start) {} + isStale(_index) { + return false + } + + initializeSizeTracking() { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => { + this.calculatedSize -= this.sizes[index] + this.sizes[index] = 0 + } + this.requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + + 'must be set.' + ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + 
!this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? 
undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + + get reset() { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length() { + deprecatedProperty('length', 'size') + return this.size + } + + static get AbortController() { + return AC + } + static get AbortSignal() { + return AS + } +} + +module.exports = LRUCache diff --git a/node_modules/tuf-js/node_modules/lru-cache/index.mjs b/node_modules/tuf-js/node_modules/lru-cache/index.mjs new file mode 100644 index 0000000000000..4a0b4813ec515 --- /dev/null +++ b/node_modules/tuf-js/node_modules/lru-cache/index.mjs @@ -0,0 +1,1227 @@ +const perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date + +const hasAbortController = typeof AbortController === 'function' + +// minimal backwards-compatibility polyfill +// this doesn't have nearly all the checks and whatnot that +// actual AbortController/Signal has, but it's enough for +// our purposes, and if used properly, behaves the same. +const AC = hasAbortController + ? AbortController + : class AbortController { + constructor() { + this.signal = new AS() + } + abort(reason = new Error('This operation was aborted')) { + this.signal.reason = this.signal.reason || reason + this.signal.aborted = true + this.signal.dispatchEvent({ + type: 'abort', + target: this.signal, + }) + } + } + +const hasAbortSignal = typeof AbortSignal === 'function' +// Some polyfills put this on the AC class, not global +const hasACAbortSignal = typeof AC.AbortSignal === 'function' +const AS = hasAbortSignal + ? AbortSignal + : hasACAbortSignal + ? AC.AbortController + : class AbortSignal { + constructor() { + this.reason = undefined + this.aborted = false + this._listeners = [] + } + dispatchEvent(e) { + if (e.type === 'abort') { + this.aborted = true + this.onabort(e) + this._listeners.forEach(f => f(e), this) + } + } + onabort() {} + addEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners.push(fn) + } + } + removeEventListener(ev, fn) { + if (ev === 'abort') { + this._listeners = this._listeners.filter(f => f !== fn) + } + } + } + +const warned = new Set() +const deprecatedOption = (opt, instead) => { + const code = `LRU_CACHE_OPTION_${opt}` + if (shouldWarn(code)) { + warn(code, `${opt} option`, `options.${instead}`, LRUCache) + } +} +const deprecatedMethod = (method, instead) => { + const code = `LRU_CACHE_METHOD_${method}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, method) + warn(code, `${method} method`, `cache.${instead}()`, get) + } +} +const deprecatedProperty = (field, instead) => { + const code = `LRU_CACHE_PROPERTY_${field}` + if (shouldWarn(code)) { + const { prototype } = LRUCache + const { get } = Object.getOwnPropertyDescriptor(prototype, field) + warn(code, `${field} property`, `cache.${instead}`, get) + } +} + +const emitWarning = (...a) => { + typeof process === 'object' && + process && + typeof process.emitWarning === 'function' + ? process.emitWarning(...a) + : console.error(...a) +} + +const shouldWarn = code => !warned.has(code) + +const warn = (code, what, instead, fn) => { + warned.add(code) + const msg = `The ${what} is deprecated. 
Please use ${instead} instead.` + emitWarning(msg, 'DeprecationWarning', code, fn) +} + +const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) + +/* istanbul ignore next - This is a little bit ridiculous, tbh. + * The maximum array length is 2^32-1 or thereabouts on most JS impls. + * And well before that point, you're caching the entire world, I mean, + * that's ~32GB of just integers for the next/prev links, plus whatever + * else to hold that many keys and values. Just filling the memory with + * zeroes at init time is brutal when you get that big. + * But why not be complete? + * Maybe in the future, these limits will have expanded. */ +const getUintArray = max => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? ZeroArray + : null + +class ZeroArray extends Array { + constructor(size) { + super(size) + this.fill(0) + } +} + +class Stack { + constructor(max) { + if (max === 0) { + return [] + } + const UintArray = getUintArray(max) + this.heap = new UintArray(max) + this.length = 0 + } + push(n) { + this.heap[this.length++] = n + } + pop() { + return this.heap[--this.length] + } +} + +class LRUCache { + constructor(options = {}) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + fetchContext, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + // deprecated options, don't trigger a warning for getting them if + // the thing being passed in is another LRUCache we're copying. + const { length, maxAge, stale } = + options instanceof LRUCache ? {} : options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? 
getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.max = max + this.maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.maxSize + this.sizeCalculation = sizeCalculation || length + if (this.sizeCalculation) { + if (!this.maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + this.fetchMethod = fetchMethod || null + if (this.fetchMethod && typeof this.fetchMethod !== 'function') { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + + this.fetchContext = fetchContext + if (!this.fetchMethod && fetchContext !== undefined) { + throw new TypeError( + 'cannot set fetchContext without fetchMethod' + ) + } + + this.keyMap = new Map() + this.keyList = new Array(max).fill(null) + this.valList = new Array(max).fill(null) + this.next = new UintArray(max) + this.prev = new UintArray(max) + this.head = 0 + this.tail = 0 + this.free = new Stack(max) + this.initialFill = 1 + this.size = 0 + + if (typeof dispose === 'function') { + this.dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.disposeAfter = disposeAfter + this.disposed = [] + } else { + this.disposeAfter = null + this.disposed = null + } + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.maxSize !== 0) { + if (!isPosInt(this.maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.initializeSizeTracking() + } + + this.allowStale = !!allowStale || !!stale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || maxAge || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.max && !this.maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + + if (stale) { + deprecatedOption('stale', 'allowStale') + } + if (maxAge) { + deprecatedOption('maxAge', 'ttl') + } + if (length) { + deprecatedOption('length', 'sizeCalculation') + } + } + + getRemainingTTL(key) { + return this.has(key, { updateAgeOnHas: false }) ? 
Infinity : 0 + } + + initializeTTLTracking() { + this.ttls = new ZeroArray(this.max) + this.starts = new ZeroArray(this.max) + + this.setItemTTL = (index, ttl, start = perf.now()) => { + this.starts[index] = ttl !== 0 ? start : 0 + this.ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.isStale(index)) { + this.delete(this.keyList[index]) + } + }, ttl + 1) + /* istanbul ignore else - unref() not supported on all platforms */ + if (t.unref) { + t.unref() + } + } + } + + this.updateItemAge = index => { + this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 + } + + this.statusTTL = (status, index) => { + if (status) { + status.ttl = this.ttls[index] + status.start = this.starts[index] + status.now = cachedNow || getNow() + status.remainingTTL = status.now + status.ttl - status.start + } + } + + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout( + () => (cachedNow = 0), + this.ttlResolution + ) + /* istanbul ignore else - not available on all platforms */ + if (t.unref) { + t.unref() + } + } + return n + } + + this.getRemainingTTL = key => { + const index = this.keyMap.get(key) + if (index === undefined) { + return 0 + } + return this.ttls[index] === 0 || this.starts[index] === 0 + ? Infinity + : this.starts[index] + + this.ttls[index] - + (cachedNow || getNow()) + } + + this.isStale = index => { + return ( + this.ttls[index] !== 0 && + this.starts[index] !== 0 && + (cachedNow || getNow()) - this.starts[index] > + this.ttls[index] + ) + } + } + updateItemAge(_index) {} + statusTTL(_status, _index) {} + setItemTTL(_index, _ttl, _start) {} + isStale(_index) { + return false + } + + initializeSizeTracking() { + this.calculatedSize = 0 + this.sizes = new ZeroArray(this.max) + this.removeItemSize = index => { + this.calculatedSize -= this.sizes[index] + this.sizes[index] = 0 + } + this.requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + + 'must be set.' 
+ ) + } + } + return size + } + this.addItemSize = (index, size, status) => { + this.sizes[index] = size + if (this.maxSize) { + const maxSize = this.maxSize - this.sizes[index] + while (this.calculatedSize > maxSize) { + this.evict(true) + } + } + this.calculatedSize += this.sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.calculatedSize + } + } + } + removeItemSize(_index) {} + addItemSize(_index, _size) {} + requireSize(_k, _v, size, sizeCalculation) { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + } + + *indexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.tail; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.head) { + break + } else { + i = this.prev[i] + } + } + } + } + + *rindexes({ allowStale = this.allowStale } = {}) { + if (this.size) { + for (let i = this.head; true; ) { + if (!this.isValidIndex(i)) { + break + } + if (allowStale || !this.isStale(i)) { + yield i + } + if (i === this.tail) { + break + } else { + i = this.next[i] + } + } + } + } + + isValidIndex(index) { + return ( + index !== undefined && + this.keyMap.get(this.keyList[index]) === index + ) + } + + *entries() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + *rentries() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield [this.keyList[i], this.valList[i]] + } + } + } + + *keys() { + for (const i of this.indexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + *rkeys() { + for (const i of this.rindexes()) { + if ( + this.keyList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.keyList[i] + } + } + } + + *values() { + for (const i of this.indexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + *rvalues() { + for (const i of this.rindexes()) { + if ( + this.valList[i] !== undefined && + !this.isBackgroundFetch(this.valList[i]) + ) { + yield this.valList[i] + } + } + } + + [Symbol.iterator]() { + return this.entries() + } + + find(fn, getOptions) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.keyList[i], this)) { + return this.get(this.keyList[i], getOptions) + } + } + } + + forEach(fn, thisp = this) { + for (const i of this.indexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + rforEach(fn, thisp = this) { + for (const i of this.rindexes()) { + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? 
v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.keyList[i], this) + } + } + + get prune() { + deprecatedMethod('prune', 'purgeStale') + return this.purgeStale + } + + purgeStale() { + let deleted = false + for (const i of this.rindexes({ allowStale: true })) { + if (this.isStale(i)) { + this.delete(this.keyList[i]) + deleted = true + } + } + return deleted + } + + dump() { + const arr = [] + for (const i of this.indexes({ allowStale: true })) { + const key = this.keyList[i] + const v = this.valList[i] + const value = this.isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + const entry = { value } + if (this.ttls) { + entry.ttl = this.ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.sizes) { + entry.size = this.sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + load(arr) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset. + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + dispose(_v, _k, _reason) {} + + set( + k, + v, + { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + status, + } = {} + ) { + size = this.requireSize(k, v, size, sizeCalculation) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case a background fetch is there already. + // in non-async cases, this is a no-op + this.delete(k) + return this + } + let index = this.size === 0 ? undefined : this.keyMap.get(k) + if (index === undefined) { + // addition + index = this.newIndex() + this.keyList[index] = k + this.valList[index] = v + this.keyMap.set(k, index) + this.next[this.tail] = index + this.prev[index] = this.tail + this.tail = index + this.size++ + this.addItemSize(index, size, status) + if (status) { + status.set = 'add' + } + noUpdateTTL = false + } else { + // update + this.moveToTail(index) + const oldVal = this.valList[index] + if (v !== oldVal) { + if (this.isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + } else { + if (!noDisposeOnSet) { + this.dispose(oldVal, k, 'set') + if (this.disposeAfter) { + this.disposed.push([oldVal, k, 'set']) + } + } + } + this.removeItemSize(index) + this.valList[index] = v + this.addItemSize(index, size, status) + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.isBackgroundFetch(oldVal) + ? 
oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && this.ttl === 0 && !this.ttls) { + this.initializeTTLTracking() + } + if (!noUpdateTTL) { + this.setItemTTL(index, ttl, start) + } + this.statusTTL(status, index) + if (this.disposeAfter) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return this + } + + newIndex() { + if (this.size === 0) { + return this.tail + } + if (this.size === this.max && this.max !== 0) { + return this.evict(false) + } + if (this.free.length !== 0) { + return this.free.pop() + } + // initial fill, just keep writing down the list + return this.initialFill++ + } + + pop() { + if (this.size) { + const val = this.valList[this.head] + this.evict(true) + return val + } + } + + evict(free) { + const head = this.head + const k = this.keyList[head] + const v = this.valList[head] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else { + this.dispose(v, k, 'evict') + if (this.disposeAfter) { + this.disposed.push([v, k, 'evict']) + } + } + this.removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.keyList[head] = null + this.valList[head] = null + this.free.push(head) + } + this.head = this.next[head] + this.keyMap.delete(k) + this.size-- + return head + } + + has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined) { + if (!this.isStale(index)) { + if (updateAgeOnHas) { + this.updateItemAge(index) + } + if (status) status.has = 'hit' + this.statusTTL(status, index) + return true + } else if (status) { + status.has = 'stale' + this.statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + // like get(), but without any LRU updating or TTL expiration + peek(k, { allowStale = this.allowStale } = {}) { + const index = this.keyMap.get(k) + if (index !== undefined && (allowStale || !this.isStale(index))) { + const v = this.valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + backgroundFetch(k, index, options, context) { + const v = index === undefined ? 
undefined : this.valList[index] + if (this.isBackgroundFetch(v)) { + return v + } + const ac = new AC() + if (options.signal) { + options.signal.addEventListener('abort', () => + ac.abort(options.signal.reason) + ) + } + const fetchOpts = { + signal: ac.signal, + options, + context, + } + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + if (this.valList[index] === p) { + if (v === undefined) { + if (p.__staleWhileFetching) { + this.valList[index] = p.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + const eb = er => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + const fetchFail = er => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + if (this.valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || p.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.valList[index] = p.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && p.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return p.__staleWhileFetching + } else if (p.__returned === p) { + throw er + } + } + const pcall = (res, rej) => { + this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res() + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + p.__abortController = ac + p.__staleWhileFetching = v + p.__returned = null + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, p, { ...fetchOpts.options, status: undefined }) + index = this.keyMap.get(k) + } else { + this.valList[index] = p + } + return p + } + + isBackgroundFetch(p) { + return ( + p && + typeof p === 'object' && + typeof p.then === 'function' && + Object.prototype.hasOwnProperty.call( + p, + '__staleWhileFetching' + ) && + Object.prototype.hasOwnProperty.call(p, '__returned') && + (p.__returned === p || p.__returned === null) + ) + } + + // this takes the union of get() and set() opts, because it does both + async fetch( + k, + { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + fetchContext = this.fetchContext, + forceRefresh = false, + status, + signal, + } = {} + ) { + if (!this.fetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.backgroundFetch(k, index, options, fetchContext) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + this.statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.backgroundFetch(k, index, options, fetchContext) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = hasStale && isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? 
p.__staleWhileFetching : (p.__returned = p) + } + } + + get( + k, + { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = {} + ) { + const index = this.keyMap.get(k) + if (index !== undefined) { + const value = this.valList[index] + const fetching = this.isBackgroundFetch(value) + this.statusTTL(status, index) + if (this.isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status) status.returnedStale = allowStale + return allowStale ? value : undefined + } else { + if (status) { + status.returnedStale = + allowStale && value.__staleWhileFetching !== undefined + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.moveToTail(index) + if (updateAgeOnGet) { + this.updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + connect(p, n) { + this.prev[n] = p + this.next[p] = n + } + + moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.tail) { + if (index === this.head) { + this.head = this.next[index] + } else { + this.connect(this.prev[index], this.next[index]) + } + this.connect(this.tail, index) + this.tail = index + } + } + + get del() { + deprecatedMethod('del', 'delete') + return this.delete + } + + delete(k) { + let deleted = false + if (this.size !== 0) { + const index = this.keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.size === 1) { + this.clear() + } else { + this.removeItemSize(index) + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + this.keyMap.delete(k) + this.keyList[index] = null + this.valList[index] = null + if (index === this.tail) { + this.tail = this.prev[index] + } else if (index === this.head) { + this.head = this.next[index] + } else { + this.next[this.prev[index]] = this.next[index] + this.prev[this.next[index]] = this.prev[index] + } + this.size-- + this.free.push(index) + } + } + } + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + return deleted + } + + clear() { + for (const index of this.rindexes({ allowStale: true })) { + const v = this.valList[index] + if (this.isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.keyList[index] + this.dispose(v, k, 'delete') + if (this.disposeAfter) { + this.disposed.push([v, k, 'delete']) + } + } + } + + this.keyMap.clear() + this.valList.fill(null) + this.keyList.fill(null) + if (this.ttls) { + this.ttls.fill(0) + this.starts.fill(0) + } + if (this.sizes) { + this.sizes.fill(0) + } + 
this.head = 0 + this.tail = 0 + this.initialFill = 1 + this.free.length = 0 + this.calculatedSize = 0 + this.size = 0 + if (this.disposed) { + while (this.disposed.length) { + this.disposeAfter(...this.disposed.shift()) + } + } + } + + get reset() { + deprecatedMethod('reset', 'clear') + return this.clear + } + + get length() { + deprecatedProperty('length', 'size') + return this.size + } + + static get AbortController() { + return AC + } + static get AbortSignal() { + return AS + } +} + +export default LRUCache diff --git a/node_modules/tuf-js/node_modules/lru-cache/package.json b/node_modules/tuf-js/node_modules/lru-cache/package.json new file mode 100644 index 0000000000000..9684991727e7a --- /dev/null +++ b/node_modules/tuf-js/node_modules/lru-cache/package.json @@ -0,0 +1,96 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "7.18.3", + "author": "Isaac Z. Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "sideEffects": false, + "scripts": { + "build": "npm run prepare", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "prepare": "node ./scripts/transpile-to-esm.js", + "size": "size-limit", + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write .", + "typedoc": "typedoc ./index.d.ts" + }, + "type": "commonjs", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", + "exports": { + ".": { + "import": { + "types": "./index.d.ts", + "default": "./index.mjs" + }, + "require": { + "types": "./index.d.ts", + "default": "./index.js" + } + }, + "./package.json": "./package.json" + }, + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "@size-limit/preset-small-lib": "^7.0.8", + "@types/node": "^17.0.31", + "@types/tap": "^15.0.6", + "benchmark": "^2.1.4", + "c8": "^7.11.2", + "clock-mock": "^1.0.6", + "eslint-config-prettier": "^8.5.0", + "prettier": "^2.6.2", + "size-limit": "^7.0.8", + "tap": "^16.3.4", + "ts-node": "^10.7.0", + "tslib": "^2.4.0", + "typedoc": "^0.23.24", + "typescript": "^4.6.4" + }, + "license": "ISC", + "files": [ + "index.js", + "index.mjs", + "index.d.ts" + ], + "engines": { + "node": ">=12" + }, + "prettier": { + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "tap": { + "nyc-arg": [ + "--include=index.js" + ], + "node-arg": [ + "--expose-gc", + "--require", + "ts-node/register" + ], + "ts": false + }, + "size-limit": [ + { + "path": "./index.js" + } + ] +} diff --git a/package-lock.json b/package-lock.json index f6716731f65bf..c9990ac4f6087 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2353,15 +2353,6 @@ "node": "^16.14.0 || >=18.0.0" } }, - "node_modules/@npmcli/agent/node_modules/lru-cache": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz", - "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==", - "inBundle": true, - "engines": { - "node": "14 || >=16.14" - } - }, "node_modules/@npmcli/arborist": { "resolved": "workspaces/arborist", "link": true @@ -2437,15 +2428,6 @@ "node": "^16.14.0 || >=18.0.0" } }, - "node_modules/@npmcli/git/node_modules/lru-cache": { - "version": "10.0.1", - "resolved": 
"https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz", - "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==", - "inBundle": true, - "engines": { - "node": "14 || >=16.14" - } - }, "node_modules/@npmcli/installed-package-contents": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-2.0.2.tgz", @@ -2513,6 +2495,14 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/@npmcli/metavuln-calculator/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "engines": { + "node": ">=12" + } + }, "node_modules/@npmcli/mock-globals": { "resolved": "mock-globals", "link": true @@ -2706,6 +2696,15 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/@npmcli/template-oss/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "dev": true, + "engines": { + "node": ">=12" + } + }, "node_modules/@npmcli/template-oss/node_modules/npm-package-arg": { "version": "10.1.0", "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz", @@ -3635,15 +3634,6 @@ "node": "^16.14.0 || >=18.0.0" } }, - "node_modules/cacache/node_modules/lru-cache": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz", - "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==", - "inBundle": true, - "engines": { - "node": "14 || >=16.14" - } - }, "node_modules/caching-transform": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz", @@ -6536,15 +6526,6 @@ "node": "^16.14.0 || >=18.0.0" } }, - "node_modules/hosted-git-info/node_modules/lru-cache": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz", - "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==", - "inBundle": true, - "engines": { - "node": "14 || >=16.14" - } - }, "node_modules/html-encoding-sniffer": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-3.0.0.tgz", @@ -6791,6 +6772,15 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/init-package-json/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "inBundle": true, + "engines": { + "node": ">=12" + } + }, "node_modules/init-package-json/node_modules/npm-package-arg": { "version": "10.1.0", "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz", @@ -8004,12 +7994,12 @@ } }, "node_modules/lru-cache": { - "version": "7.18.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", - "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz", + 
"integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==", "inBundle": true, "engines": { - "node": ">=12" + "node": "14 || >=16.14" } }, "node_modules/make-dir": { @@ -9598,6 +9588,15 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/node-gyp/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "inBundle": true, + "engines": { + "node": ">=12" + } + }, "node_modules/node-gyp/node_modules/make-fetch-happen": { "version": "11.1.1", "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz", @@ -9780,6 +9779,15 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/normalize-package-data/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "inBundle": true, + "engines": { + "node": ">=12" + } + }, "node_modules/normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", @@ -9891,6 +9899,15 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/npm-pick-manifest/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "inBundle": true, + "engines": { + "node": ">=12" + } + }, "node_modules/npm-pick-manifest/node_modules/npm-package-arg": { "version": "10.1.0", "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz", @@ -9985,6 +10002,15 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/npm-registry-fetch/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "inBundle": true, + "engines": { + "node": ">=12" + } + }, "node_modules/npm-registry-fetch/node_modules/make-fetch-happen": { "version": "12.0.0", "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-12.0.0.tgz", @@ -10641,6 +10667,15 @@ "node": "^14.17.0 || ^16.13.0 || >=18.0.0" } }, + "node_modules/pacote/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "inBundle": true, + "engines": { + "node": ">=12" + } + }, "node_modules/pacote/node_modules/npm-package-arg": { "version": "10.1.0", "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-10.1.0.tgz", @@ -10791,15 +10826,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/path-scurry/node_modules/lru-cache": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.0.1.tgz", - "integrity": "sha512-IJ4uwUTi2qCccrioU6g9g/5rvvVl13bsdczUUcqbciD9iLr095yj8DQKdObriEvuNSx325N1rV1O0sJFszx75g==", - "inBundle": true, - "engines": { - "node": "14 || >=16.14" - } - }, "node_modules/path-type": { "version": "4.0.0", "resolved": 
"https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", @@ -12036,6 +12062,15 @@ "node": ">=16 || 14 >=14.17" } }, + "node_modules/sigstore/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "inBundle": true, + "engines": { + "node": ">=12" + } + }, "node_modules/sigstore/node_modules/make-fetch-happen": { "version": "11.1.1", "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz", @@ -15229,6 +15264,15 @@ "node": ">=16 || 14 >=14.17" } }, + "node_modules/tuf-js/node_modules/lru-cache": { + "version": "7.18.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-7.18.3.tgz", + "integrity": "sha512-jumlc0BIUrS3qJGgIkWZsyfAM7NCWiBcCDhnd+3NNM5KbBmLTgHVfWBcg6W+rLUsIpzpERPsvwUP7CckAQSOoA==", + "inBundle": true, + "engines": { + "node": ">=12" + } + }, "node_modules/tuf-js/node_modules/make-fetch-happen": { "version": "11.1.1", "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-11.1.1.tgz",