Project files

receipeServer/frontend_old/node_modules/lru-cache/LICENSE (generated, vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
The ISC License

Copyright (c) 2010-2022 Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

receipeServer/frontend_old/node_modules/lru-cache/README.md (generated, vendored, new file, 699 lines)
@@ -0,0 +1,699 @@
# lru-cache

A cache object that deletes the least-recently-used items.

Specify a max number of the most recently used items that you want to keep,
and this cache will keep that many of the most recently accessed items.

This is not primarily a TTL cache, and does not make strong TTL guarantees.
There is no preemptive pruning of expired items, but you _may_ set a TTL
on the cache or on a single `set`. If you do so, it will treat expired
items as missing, and delete them when fetched.

As of version 7, this is one of the most performant LRU implementations
available in JavaScript, and supports a wide diversity of use cases.
However, note that using some of the features will necessarily impact
performance, by causing the cache to have to do more work. See the
"Performance" section below.

## Installation

```bash
npm install lru-cache --save
```

## Usage

```js
const LRU = require('lru-cache')

// At least one of 'max', 'ttl', or 'maxSize' is required, to prevent
// unsafe unbounded storage.
// In most cases, it's best to specify a max for performance, so all
// the required memory allocation is done up-front.
const options = {
  // the number of most recently used items to keep.
  // note that we may store fewer items than this if maxSize is hit.
  max: 500, // <-- Technically optional, but see "Storage Bounds Safety" below

  // if you wish to track item size, you must provide a maxSize
  // note that we still will only keep up to max *actual items*,
  // so size tracking may cause fewer than max items to be stored.
  // At the extreme, a single item of maxSize size will cause everything
  // else in the cache to be dropped when it is added. Use with caution!
  // Note also that size tracking can negatively impact performance,
  // though for most cases, only minimally.
  maxSize: 5000,

  // function to calculate size of items. useful if storing strings or
  // buffers or other items where memory size depends on the object itself.
  // also note that oversized items do NOT immediately get dropped from
  // the cache, though they will cause faster turnover in the storage.
  sizeCalculation: (value, key) => {
    // return a positive integer which is the size of the item;
    // if a positive integer is not returned, an error is thrown.
    return 1
  },

  // function to call when the item is removed from the cache
  // Note that using this can negatively impact performance.
  dispose: (value, key) => {
    freeFromMemoryOrWhatever(value)
  },

  // max time to live for items before they are considered stale
  // note that stale items are NOT preemptively removed by default,
  // and MAY live in the cache, contributing to its LRU max, long after
  // they have expired.
  // Also, as this cache is optimized for LRU/MRU operations, some of
  // the staleness/TTL checks will reduce performance, as they will incur
  // overhead by deleting items.
  // Must be a positive integer in ms, defaults to 0, which means "no TTL"
  ttl: 1000 * 60 * 5,

  // return stale items from cache.get() before disposing of them
  // boolean, default false
  allowStale: false,

  // update the age of items on cache.get(), renewing their TTL
  // boolean, default false
  updateAgeOnGet: false,

  // update the age of items on cache.has(), renewing their TTL
  // boolean, default false
  updateAgeOnHas: false,
}

const cache = new LRU(options)

cache.set("key", "value")
cache.get("key") // "value"

// non-string keys ARE fully supported
// but note that it must be THE SAME object, not
// just a JSON-equivalent object.
var someObject = { a: 1 }
cache.set(someObject, 'a value')
// Object keys are not toString()-ed
cache.set('[object Object]', 'a different value')
assert.equal(cache.get(someObject), 'a value')
// A similar object with same keys/values won't work,
// because it's a different object identity
assert.equal(cache.get({ a: 1 }), undefined)

cache.clear() // empty the cache
```

If you put more stuff in it, then items will fall out.

## Options

* `max` - The maximum number (or size) of items that remain in the cache
  (assuming no TTL pruning or explicit deletions). Note that fewer items
  may be stored if size calculation is used, and `maxSize` is exceeded.
  This must be a positive finite integer.

  At least one of `max`, `maxSize`, or `ttl` is required. This must be a
  positive integer if set.

  **It is strongly recommended to set a `max` to prevent unbounded growth
  of the cache.** See "Storage Bounds Safety" below.

* `maxSize` - Set to a positive integer to track the sizes of items added
  to the cache, and automatically evict items in order to stay below this
  size. Note that this may result in fewer than `max` items being stored.

  Optional, must be a positive integer if provided. Required if other
  size tracking features are used.

  At least one of `max`, `maxSize`, or `ttl` is required. This must be a
  positive integer if set.

  Even if size tracking is enabled, **it is strongly recommended to set a
  `max` to prevent unbounded growth of the cache.** See "Storage Bounds
  Safety" below.

* `sizeCalculation` - Function used to calculate the size of stored
  items. If you're storing strings or buffers, then you probably want to
  do something like `n => n.length`. The item is passed as the first
  argument, and the key is passed as the second argument.

  This may be overridden by passing an options object to `cache.set()`.

  Requires `maxSize` to be set.

  Deprecated alias: `length`
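
  As a quick illustration of the size-tracking options above (a minimal
  sketch; the concrete limits and keys here are arbitrary, not prescribed
  by the library):

  ```js
  const LRU = require('lru-cache')

  // Bound the cache by total string length rather than just item count.
  const cache = new LRU({
    max: 1000,          // still pre-allocate for at most 1000 items
    maxSize: 1024 * 64, // at most 64KiB of characters in total
    sizeCalculation: (value, key) => value.length, // assumes string values
  })

  cache.set('greeting', 'hello world') // size 11, via sizeCalculation
  cache.set('big', 'x'.repeat(1024), { size: 1024 }) // explicit size skips the function
  console.log(cache.calculatedSize) // 1035
  ```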

* `fetchMethod` - Function that is used to make background asynchronous
  fetches. Called with `fetchMethod(key, staleValue, { signal, options })`.
  May return a Promise.

  If `fetchMethod` is not provided, then `cache.fetch(key)` is equivalent
  to `Promise.resolve(cache.get(key))`.

  The `signal` object is an `AbortSignal`. If at any time,
  `signal.aborted` is set to `true`, then that means that the fetch
  should be abandoned. This may be passed along to async functions aware
  of AbortController/AbortSignal behavior.

  The `options` object is a union of the options that may be provided to
  `set()` and `get()`. If they are modified, then that will result in
  modifying the settings to `cache.set()` when the value is resolved.
  For example, a DNS cache may update the TTL based on the value returned
  from a remote DNS server by changing `options.ttl` in the
  `fetchMethod`.
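
  A sketch of that DNS-style use case (the `resolveDNS` helper and its
  return shape are illustrative assumptions, not part of this library):

  ```js
  const LRU = require('lru-cache')

  // Hypothetical resolver: any promise-returning lookup works here.
  const resolveDNS = async (hostname, { signal }) => {
    // ...perform the lookup, abandoning work if signal.aborted becomes true
    return { address: '93.184.216.34', ttl: 60 }
  }

  const dnsCache = new LRU({
    max: 1000,
    ttl: 1000 * 60 * 5,
    fetchMethod: async (key, staleValue, { signal, options }) => {
      const record = await resolveDNS(key, { signal })
      // adjust the TTL for this one entry based on what the server reported
      options.ttl = record.ttl * 1000
      return record
    },
  })

  // fetchMethod only runs on a miss (or a stale hit)
  dnsCache.fetch('example.com').then(record => console.log(record.address))
  ```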

* `dispose` - Function that is called on items when they are dropped
  from the cache, as `this.dispose(value, key, reason)`.

  This can be handy if you want to close file descriptors or do other
  cleanup tasks when items are no longer stored in the cache.

  **NOTE**: It is called *before* the item has been fully removed from
  the cache, so if you want to put it right back in, you need to wait
  until the next tick. If you try to add it back in during the
  `dispose()` function call, it will break things in subtle and weird
  ways.

  Unlike several other options, this may _not_ be overridden by passing
  an option to `set()`, for performance reasons. If disposal functions
  may vary between cache entries, then the entire list must be scanned
  on every cache swap, even if no disposal function is in use.

  The `reason` will be one of the following strings, corresponding to the
  reason for the item's deletion:

  * `evict` - Item was evicted to make space for a new addition
  * `set` - Item was overwritten by a new value
  * `delete` - Item was removed by explicit `cache.delete(key)` or by
    calling `cache.clear()`, which deletes everything.

  The `dispose()` method is _not_ called for canceled calls to
  `fetchMethod()`. If you wish to handle evictions, overwrites, and
  deletes of in-flight asynchronous fetches, you must use the
  `AbortSignal` provided.

  Optional, must be a function.

* `disposeAfter` - The same as `dispose`, but called _after_ the entry is
  completely removed and the cache is once again in a clean state.

  It is safe to add an item right back into the cache at this point.
  However, note that it is _very_ easy to inadvertently create infinite
  recursion in this way.

  The `disposeAfter()` method is _not_ called for canceled calls to
  `fetchMethod()`. If you wish to handle evictions, overwrites, and
  deletes of in-flight asynchronous fetches, you must use the
  `AbortSignal` provided.
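
  A sketch of the file-descriptor use case mentioned above (it assumes
  Node's built-in `fs` module, and reads this script's own file just so
  the example runs anywhere):

  ```js
  const LRU = require('lru-cache')
  const fs = require('fs')

  const fdCache = new LRU({
    max: 100,
    // close the descriptor no matter how the entry left the cache;
    // reason is 'evict', 'set', or 'delete'
    dispose: (fd, path, reason) => {
      fs.close(fd, () => {})
    },
  })

  fdCache.set(__filename, fs.openSync(__filename, 'r'))
  fdCache.delete(__filename) // dispose() runs with reason 'delete'
  ```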

* `noDisposeOnSet` - Set to `true` to suppress calling the `dispose()`
  function if the entry key is still accessible within the cache.

  This may be overridden by passing an options object to `cache.set()`.

  Boolean, default `false`. Only relevant if `dispose` or `disposeAfter`
  options are set.

* `ttl` - Max time to live for items before they are considered stale.
  Note that stale items are NOT preemptively removed by default, and MAY
  live in the cache, contributing to its LRU max, long after they have
  expired.

  Also, as this cache is optimized for LRU/MRU operations, some of
  the staleness/TTL checks will reduce performance, as they will incur
  overhead by deleting from Map objects rather than simply throwing old
  Map objects away.

  This is not primarily a TTL cache, and does not make strong TTL
  guarantees. There is no preemptive pruning of expired items, but you
  _may_ set a TTL on the cache, and it will treat expired items as missing
  when they are fetched, and delete them.

  Optional, but must be a positive integer in ms if specified.

  This may be overridden by passing an options object to `cache.set()`.

  At least one of `max`, `maxSize`, or `ttl` is required. This must be a
  positive integer if set.

  Even if ttl tracking is enabled, **it is strongly recommended to set a
  `max` to prevent unbounded growth of the cache.** See "Storage Bounds
  Safety" below.

  If ttl tracking is enabled, and `max` and `maxSize` are not set, and
  `ttlAutopurge` is not set, then a warning will be emitted cautioning
  about the potential for unbounded memory consumption.

  Deprecated alias: `maxAge`

* `noUpdateTTL` - Boolean flag to tell the cache to not update the TTL when
  setting a new value for an existing key (ie, when updating a value rather
  than inserting a new value). Note that the TTL value is _always_ set
  (if provided) when adding a new entry into the cache.

  This may be passed as an option to `cache.set()`.

  Boolean, default false.

* `ttlResolution` - Minimum amount of time in ms in which to check for
  staleness. Defaults to `1`, which means that the current time is checked
  at most once per millisecond.

  Set to `0` to check the current time every time staleness is tested.

  Note that setting this to a higher value _will_ improve performance
  somewhat while using ttl tracking, albeit at the expense of keeping
  stale items around a bit longer than intended.

* `ttlAutopurge` - Preemptively remove stale items from the cache.

  Note that this may _significantly_ degrade performance, especially if
  the cache is storing a large number of items. It is almost always best
  to just leave the stale items in the cache, and let them fall out as
  new items are added.

  Note that this means that `allowStale` is a bit pointless, as stale
  items will be deleted almost as soon as they expire.

  Use with caution!

  Boolean, default `false`

* `allowStale` - By default, if you set `ttl`, it'll only delete stale
  items from the cache when you `get(key)`. That is, it's not
  preemptively pruning items.

  If you set `allowStale:true`, it'll return the stale value as well as
  deleting it. If you don't set this, then it'll return `undefined` when
  you try to get a stale entry.

  Note that when a stale entry is fetched, _even if it is returned due to
  `allowStale` being set_, it is removed from the cache immediately. You
  can immediately put it back in the cache if you wish, thus resetting the
  TTL.

  This may be overridden by passing an options object to `cache.get()`.
  The `cache.has()` method will always return `false` for stale items.

  Boolean, default false, only relevant if `ttl` is set.

  Deprecated alias: `stale`
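
  To make the TTL semantics above concrete, a small sketch (the timings
  are illustrative):

  ```js
  const LRU = require('lru-cache')

  const cache = new LRU({ max: 100, ttl: 1000, allowStale: true })
  cache.set('k', 'v')

  cache.getRemainingTTL('k') // ~1000 (ms left before 'k' goes stale)

  setTimeout(() => {
    // stale now: returned once because allowStale is set, but also deleted
    cache.get('k') // 'v'
    cache.get('k') // undefined -- it was removed on the stale get
    cache.has('k') // false: has() never reports stale entries
  }, 1100)
  ```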

* `updateAgeOnGet` - When using time-expiring entries with `ttl`, setting
  this to `true` will make each item's age reset to 0 whenever it is
  retrieved from cache with `get()`, causing it to not expire. (It can
  still fall out of cache based on recency of use, of course.)

  This may be overridden by passing an options object to `cache.get()`.

  Boolean, default false, only relevant if `ttl` is set.

* `updateAgeOnHas` - When using time-expiring entries with `ttl`, setting
  this to `true` will make each item's age reset to 0 whenever its presence
  in the cache is checked with `has()`, causing it to not expire. (It can
  still fall out of cache based on recency of use, of course.)

  This may be overridden by passing an options object to `cache.has()`.

  Boolean, default false, only relevant if `ttl` is set.

## API

* `new LRUCache(options)`

  Create a new LRUCache. All options are documented above, and are on
  the cache as public members.

* `cache.max`, `cache.maxSize`, `cache.allowStale`, `cache.noDisposeOnSet`,
  `cache.sizeCalculation`, `cache.dispose`, `cache.ttl`,
  `cache.updateAgeOnGet`, `cache.updateAgeOnHas`

  All option names are exposed as public members on the cache object.

  These are intended for read access only. Changing them during program
  operation can cause undefined behavior.

* `cache.size`

  The total number of items held in the cache at the current moment.

* `cache.calculatedSize`

  The total size of items in cache when using size tracking.

* `set(key, value, [{ size, sizeCalculation, ttl, noDisposeOnSet }])`

  Add a value to the cache.

  Optional options object may contain `ttl` and `sizeCalculation` as
  described above, which default to the settings on the cache object.

  The options object may also include `size`, which will prevent calling the
  `sizeCalculation` function and just use the specified number if it is a
  positive integer, and `noDisposeOnSet`, which will prevent calling a
  `dispose` function in the case of overwrites.

  Will update the recency of the entry.

  Returns the cache object.
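
  For instance, overriding the cache-level defaults for a single entry
  (a sketch; the keys and values are arbitrary):

  ```js
  const LRU = require('lru-cache')
  const cache = new LRU({
    max: 100,
    maxSize: 1024,
    ttl: 1000 * 60 * 5,
    sizeCalculation: v => v.length,
  })

  // cache-level ttl is 5 minutes, but this entry expires in 10 seconds
  cache.set('session:abc', 'some-token', { ttl: 10 * 1000 })

  // supply the size directly, skipping the sizeCalculation function
  cache.set('blob', 'binary-ish payload', { size: 18 })
  ```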

* `get(key, { updateAgeOnGet, allowStale } = {}) => value`

  Return a value from the cache.

  Will update the recency of the cache entry found.

  If the key is not found, `get()` will return `undefined`. This can be
  confusing when setting values specifically to `undefined`, as in
  `cache.set(key, undefined)`. Use `cache.has()` to determine whether a
  key is present in the cache at all.

* `async fetch(key, { updateAgeOnGet, allowStale, size, sizeCalculation, ttl, noDisposeOnSet } = {}) => Promise`

  If the value is in the cache and not stale, then the returned Promise
  resolves to the value.

  If not in the cache, or beyond its TTL staleness, then
  `fetchMethod(key, staleValue, options)` is called, and the value
  returned will be added to the cache once resolved.

  If called with `allowStale`, and an asynchronous fetch is currently in
  progress to reload a stale value, then the former stale value will be
  returned.

  Multiple fetches for the same `key` will only call `fetchMethod` a
  single time, and all will be resolved when the value is resolved, even
  if different options are used.

  If `fetchMethod` is not specified, then this is effectively an alias
  for `Promise.resolve(cache.get(key))`.

  When the fetch method resolves to a value, if the fetch has not been
  aborted due to deletion, eviction, or being overwritten, then it is
  added to the cache using the options provided.
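
  A sketch of the stale-while-revalidate pattern this enables (the
  `fetchMethod` here is a trivial stand-in for a slow lookup):

  ```js
  const LRU = require('lru-cache')

  const cache = new LRU({
    max: 100,
    ttl: 1000,
    allowStale: true,
    fetchMethod: async (key) => `fresh:${key}`,
  })

  const main = async () => {
    // miss: fetchMethod runs, and the promise resolves to its value
    console.log(await cache.fetch('a')) // 'fresh:a'

    // once 'a' is stale, fetch() with allowStale resolves right away
    // with the old value while the refresh proceeds in the background;
    // without allowStale it would wait for the fresh value instead.
  }
  main()
  ```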

* `peek(key, { allowStale } = {}) => value`

  Like `get()` but doesn't update recency or delete stale items.

  Returns `undefined` if the item is stale, unless `allowStale` is set
  either on the cache or in the options object.

* `has(key, { updateAgeOnHas } = {}) => Boolean`

  Check if a key is in the cache, without updating the recency of use.
  Age is updated if `updateAgeOnHas` is set to `true` in either the
  options or the constructor.

  Will return `false` if the item is stale, even though it is technically
  in the cache.

* `delete(key)`

  Deletes a key out of the cache.

  Returns `true` if the key was deleted, `false` otherwise.

* `clear()`

  Clear the cache entirely, throwing away all values.

  Deprecated alias: `reset()`

* `keys()`

  Return a generator yielding the keys in the cache, in order from most
  recently used to least recently used.

* `rkeys()`

  Return a generator yielding the keys in the cache, in order from least
  recently used to most recently used.

* `values()`

  Return a generator yielding the values in the cache, in order from most
  recently used to least recently used.

* `rvalues()`

  Return a generator yielding the values in the cache, in order from
  least recently used to most recently used.

* `entries()`

  Return a generator yielding `[key, value]` pairs, in order from most
  recently used to least recently used.

* `rentries()`

  Return a generator yielding `[key, value]` pairs, in order from least
  recently used to most recently used.
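
  Since the cache also implements `Symbol.iterator` (delegating to
  `entries()`, per the implementation further below), these generators
  compose naturally with `for...of` (a small sketch):

  ```js
  const LRU = require('lru-cache')
  const cache = new LRU({ max: 3 })
  cache.set('a', 1)
  cache.set('b', 2)
  cache.get('a') // touch 'a' so it becomes most recently used

  for (const key of cache.keys()) {
    console.log(key) // 'a', then 'b'
  }
  for (const [key, value] of cache) { // Symbol.iterator -> entries()
    console.log(key, value) // 'a' 1, then 'b' 2
  }
  ```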

* `find(fn, [getOptions])`

  Find a value for which the supplied `fn` method returns a truthy value,
  similar to `Array.find()`.

  `fn` is called as `fn(value, key, cache)`.

  The optional `getOptions` are applied to the resulting `get()` of the
  item found.
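
  A minimal sketch (the entries are arbitrary):

  ```js
  const LRU = require('lru-cache')
  const cache = new LRU({ max: 10 })
  cache.set('u:1', { name: 'ada' })
  cache.set('u:2', { name: 'grace' })

  // scans from most to least recently used; the winning entry is then
  // get()-ed, so its recency is updated
  const hit = cache.find((value, key) => value.name === 'grace')
  // hit => { name: 'grace' }
  ```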

* `dump()`

  Return an array of `[key, entry]` objects which can be passed to
  `cache.load()`.

  Note: this returns an actual array, not a generator, so it can be more
  easily passed around.

* `load(entries)`

  Reset the cache and load in the items in `entries` in the order listed.
  Note that the shape of the resulting cache may be different if the same
  options are not used in both caches.
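
  A sketch of a persistence round-trip using `dump()` and `load()` (the
  file path is illustrative, and JSON serialization assumes the keys and
  values are JSON-safe):

  ```js
  const LRU = require('lru-cache')
  const fs = require('fs')

  const cache = new LRU({ max: 100, ttl: 1000 * 60 })
  cache.set('a', 1)
  cache.set('b', 2)

  // serialize: dump() returns a plain array of [key, entry] pairs
  fs.writeFileSync('/tmp/cache.json', JSON.stringify(cache.dump()))

  // ...later, in a cache created with the same options:
  const restored = new LRU({ max: 100, ttl: 1000 * 60 })
  restored.load(JSON.parse(fs.readFileSync('/tmp/cache.json', 'utf8')))
  restored.get('b') // 2
  ```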

* `purgeStale()`

  Delete any stale entries. Returns `true` if anything was removed,
  `false` otherwise.

  Deprecated alias: `prune`

* `getRemainingTTL(key)`

  Return the number of ms left in the item's TTL. If item is not in
  cache, returns `0`. Returns `Infinity` if item is in cache without a
  defined TTL.

* `forEach(fn, [thisp])`

  Call the `fn` function with each set of `fn(value, key, cache)` in the
  LRU cache, from most recent to least recently used.

  Does not affect recency of use.

  If `thisp` is provided, the function will be called in the `this`-context
  of the provided object.

* `rforEach(fn, [thisp])`

  Same as `cache.forEach(fn, thisp)`, but in order from least recently
  used to most recently used.

* `pop()`

  Evict the least recently used item, returning its value.

  Returns `undefined` if cache is empty.

### Internal Methods and Properties

In order to optimize performance as much as possible, "private" members and
methods are exposed on the object as normal properties, rather than being
accessed via Symbols, private members, or closure variables.

**Do not use or rely on these.** They will change or be removed without
notice. They will cause undefined behavior if used inappropriately. There
is no need or reason to ever call them directly.

This documentation is here so that it is especially clear that this is not
"undocumented" because someone forgot; it _is_ documented, and the
documentation is telling you not to do it.

**Do not report bugs that stem from using these properties.** They will be
ignored.

* `initializeTTLTracking()` Set up the cache for tracking TTLs
* `updateItemAge(index)` Called when an item age is updated, by internal ID
* `setItemTTL(index)` Called when an item ttl is updated, by internal ID
* `isStale(index)` Called to check an item's staleness, by internal ID
* `initializeSizeTracking()` Set up the cache for tracking item size.
  Called automatically when a size is specified.
* `removeItemSize(index)` Updates the internal size calculation when an
  item is removed or modified, by internal ID
* `addItemSize(index)` Updates the internal size calculation when an item
  is added or modified, by internal ID
* `indexes()` An iterator over the non-stale internal IDs, from most
  recently to least recently used.
* `rindexes()` An iterator over the non-stale internal IDs, from least
  recently to most recently used.
* `newIndex()` Create a new internal ID, either reusing a deleted ID,
  evicting the least recently used ID, or walking to the end of the
  allotted space.
* `evict()` Evict the least recently used internal ID, returning its ID.
  Does not do any bounds checking.
* `connect(p, n)` Connect the `p` and `n` internal IDs in the linked list.
* `moveToTail(index)` Move the specified internal ID to the most recently
  used position.
* `keyMap` Map of keys to internal IDs
* `keyList` List of keys by internal ID
* `valList` List of values by internal ID
* `sizes` List of calculated sizes by internal ID
* `ttls` List of TTL values by internal ID
* `starts` List of start time values by internal ID
* `next` Array of "next" pointers by internal ID
* `prev` Array of "previous" pointers by internal ID
* `head` Internal ID of least recently used item
* `tail` Internal ID of most recently used item
* `free` Stack of deleted internal IDs

## Storage Bounds Safety

This implementation aims to be as flexible as possible, within the limits
of safe memory consumption and optimal performance.

At initial object creation, storage is allocated for `max` items. If `max`
is set to zero, then some performance is lost, and item count is unbounded.
Either `maxSize` or `ttl` _must_ be set if `max` is not specified.

If `maxSize` is set, then this creates a safe limit on the maximum storage
consumed, but without the performance benefits of pre-allocation. When
`maxSize` is set, every item _must_ provide a size, either via the
`sizeCalculation` method provided to the constructor, or via a `size` or
`sizeCalculation` option provided to `cache.set()`. The size of every item
_must_ be a positive integer.

If neither `max` nor `maxSize` are set, then `ttl` tracking must be
enabled. Note that, even when tracking item `ttl`, items are _not_
preemptively deleted when they become stale, unless `ttlAutopurge` is
enabled. Instead, they are only purged the next time the key is requested.
Thus, if `ttlAutopurge`, `max`, and `maxSize` are all not set, then the
cache will potentially grow unbounded.

In this case, a warning is printed to standard error. Future versions may
require the use of `ttlAutopurge` if `max` and `maxSize` are not specified.

If you truly wish to use a cache that is bound _only_ by TTL expiration,
consider using a `Map` object, and calling `setTimeout` to delete entries
when they expire. It will perform much better than an LRU cache.

Here is an implementation you may use, under the same [license](./LICENSE)
as this package:

```js
// a storage-unbounded ttl cache that is not an lru-cache
const cache = {
  data: new Map(),
  timers: new Map(),
  set: (k, v, ttl) => {
    if (cache.timers.has(k)) {
      clearTimeout(cache.timers.get(k))
    }
    cache.timers.set(k, setTimeout(() => cache.delete(k), ttl))
    cache.data.set(k, v)
  },
  get: k => cache.data.get(k),
  has: k => cache.data.has(k),
  delete: k => {
    if (cache.timers.has(k)) {
      clearTimeout(cache.timers.get(k))
    }
    cache.timers.delete(k)
    return cache.data.delete(k)
  },
  clear: () => {
    cache.data.clear()
    for (const v of cache.timers.values()) {
      clearTimeout(v)
    }
    cache.timers.clear()
  }
}
```

## Performance

As of January 2022, version 7 of this library is one of the most performant
LRU cache implementations in JavaScript.

Benchmarks can be extremely difficult to get right. In particular, the
performance of set/get/delete operations on objects will vary _wildly_
depending on the type of key used. V8 is highly optimized for objects with
keys that are short strings, especially integer numeric strings. Thus any
benchmark which tests _solely_ using numbers as keys will tend to find that
an object-based approach performs the best.

Note that coercing _anything_ to strings to use as object keys is unsafe,
unless you can be 100% certain that no other type of value will be used.
For example:

```js
const myCache = {}
const set = (k, v) => myCache[k] = v
const get = (k) => myCache[k]

set({}, 'please hang onto this for me')
set('[object Object]', 'oopsie')
```

Also beware of "Just So" stories regarding performance. Garbage collection
of large (especially: deep) object graphs can be incredibly costly, with
several "tipping points" where it increases exponentially. As a result,
putting that off until later can make it much worse, and less predictable.
If a library performs well, but only in a scenario where the object graph is
kept shallow, then that won't help you if you are using large objects as
keys.

In general, when attempting to use a library to improve performance (such
as a cache like this one), it's best to choose an option that will perform
well in the sorts of scenarios where you'll actually use it.

This library is optimized for repeated gets and minimizing eviction time,
since that is the expected need of a LRU. Set operations are somewhat
slower on average than a few other options, in part because of that
optimization. It is assumed that you'll be caching some costly operation,
ideally as rarely as possible, so optimizing set over get would be unwise.

If performance matters to you:

1. If it's at all possible to use small integer values as keys, and you can
   guarantee that no other types of values will be used as keys, then do
   that, and use a cache such as
   [lru-fast](https://npmjs.com/package/lru-fast), or [mnemonist's
   LRUCache](https://yomguithereal.github.io/mnemonist/lru-cache), which
   uses an Object as its data store.
2. Failing that, if at all possible, use short non-numeric strings (ie,
   less than 256 characters) as your keys, and use [mnemonist's
   LRUCache](https://yomguithereal.github.io/mnemonist/lru-cache).
3. If the types of your keys will be long strings, strings that look like
   floats, `null`, objects, or some mix of types, or if you aren't sure,
   then this library will work well for you.
4. Do not use a `dispose` function, size tracking, or especially ttl
   behavior, unless absolutely needed. These features are convenient, and
   necessary in some use cases, and every attempt has been made to make the
   performance impact minimal, but it isn't nothing.

## Breaking Changes in Version 7

This library changed to a different algorithm and internal data structure
in version 7, yielding significantly better performance, albeit with
some subtle changes as a result.

If you were relying on the internals of LRUCache in version 6 or before, it
probably will not work in version 7 and above.

For more info, see the [change log](CHANGELOG.md).

receipeServer/frontend_old/node_modules/lru-cache/index.js (generated, vendored, new file, 815 lines)
@@ -0,0 +1,815 @@
const perf = typeof performance === 'object' && performance &&
  typeof performance.now === 'function' ? performance : Date

const hasAbortController = typeof AbortController !== 'undefined'

// minimal backwards-compatibility polyfill
const AC = hasAbortController ? AbortController : Object.assign(
  class AbortController {
    constructor () { this.signal = new AC.AbortSignal }
    abort () { this.signal.aborted = true }
  },
  { AbortSignal: class AbortSignal { constructor () { this.aborted = false }}}
)

const warned = new Set()
const deprecatedOption = (opt, instead) => {
  const code = `LRU_CACHE_OPTION_${opt}`
  if (shouldWarn(code)) {
    warn(code, `${opt} option`, `options.${instead}`, LRUCache)
  }
}
const deprecatedMethod = (method, instead) => {
  const code = `LRU_CACHE_METHOD_${method}`
  if (shouldWarn(code)) {
    const { prototype } = LRUCache
    const { get } = Object.getOwnPropertyDescriptor(prototype, method)
    warn(code, `${method} method`, `cache.${instead}()`, get)
  }
}
const deprecatedProperty = (field, instead) => {
  const code = `LRU_CACHE_PROPERTY_${field}`
  if (shouldWarn(code)) {
    const { prototype } = LRUCache
    const { get } = Object.getOwnPropertyDescriptor(prototype, field)
    warn(code, `${field} property`, `cache.${instead}`, get)
  }
}

const emitWarning = (...a) => {
  typeof process === 'object' &&
  process &&
  typeof process.emitWarning === 'function'
    ? process.emitWarning(...a)
    : console.error(...a)
}

const shouldWarn = code => !warned.has(code)

const warn = (code, what, instead, fn) => {
  warned.add(code)
  const msg = `The ${what} is deprecated. Please use ${instead} instead.`
  emitWarning(msg, 'DeprecationWarning', code, fn)
}

const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n)

/* istanbul ignore next - This is a little bit ridiculous, tbh.
 * The maximum array length is 2^32-1 or thereabouts on most JS impls.
 * And well before that point, you're caching the entire world, I mean,
 * that's ~32GB of just integers for the next/prev links, plus whatever
 * else to hold that many keys and values. Just filling the memory with
 * zeroes at init time is brutal when you get that big.
 * But why not be complete?
 * Maybe in the future, these limits will have expanded. */
const getUintArray = max => !isPosInt(max) ? null
  : max <= Math.pow(2, 8) ? Uint8Array
  : max <= Math.pow(2, 16) ? Uint16Array
  : max <= Math.pow(2, 32) ? Uint32Array
  : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
  : null

class ZeroArray extends Array {
  constructor (size) {
    super(size)
    this.fill(0)
  }
}

class Stack {
  constructor (max) {
    const UintArray = max ? getUintArray(max) : Array
    this.heap = new UintArray(max)
    this.length = 0
  }
  push (n) {
    this.heap[this.length++] = n
  }
  pop () {
    return this.heap[--this.length]
  }
}

class LRUCache {
  constructor (options = {}) {
    const {
      max = 0,
      ttl,
      ttlResolution = 1,
      ttlAutopurge,
      updateAgeOnGet,
      updateAgeOnHas,
      allowStale,
      dispose,
      disposeAfter,
      noDisposeOnSet,
      noUpdateTTL,
      maxSize = 0,
      sizeCalculation,
      fetchMethod,
    } = options

    // deprecated options, don't trigger a warning for getting them if
    // the thing being passed in is another LRUCache we're copying.
    const {
      length,
      maxAge,
      stale,
    } = options instanceof LRUCache ? {} : options

    if (max !== 0 && !isPosInt(max)) {
      throw new TypeError('max option must be a nonnegative integer')
    }

    const UintArray = max ? getUintArray(max) : Array
    if (!UintArray) {
      throw new Error('invalid max value: ' + max)
    }

    this.max = max
    this.maxSize = maxSize
    this.sizeCalculation = sizeCalculation || length
    if (this.sizeCalculation) {
      if (!this.maxSize) {
        throw new TypeError('cannot set sizeCalculation without setting maxSize')
      }
      if (typeof this.sizeCalculation !== 'function') {
        throw new TypeError('sizeCalculation set to non-function')
      }
    }

    this.fetchMethod = fetchMethod || null
    if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
      throw new TypeError('fetchMethod must be a function if specified')
    }

    this.keyMap = new Map()
    this.keyList = new Array(max).fill(null)
    this.valList = new Array(max).fill(null)
    this.next = new UintArray(max)
    this.prev = new UintArray(max)
    this.head = 0
    this.tail = 0
    this.free = new Stack(max)
    this.initialFill = 1
    this.size = 0

    if (typeof dispose === 'function') {
      this.dispose = dispose
    }
    if (typeof disposeAfter === 'function') {
      this.disposeAfter = disposeAfter
      this.disposed = []
    } else {
      this.disposeAfter = null
      this.disposed = null
    }
    this.noDisposeOnSet = !!noDisposeOnSet
    this.noUpdateTTL = !!noUpdateTTL

    if (this.maxSize !== 0) {
      if (!isPosInt(this.maxSize)) {
        throw new TypeError('maxSize must be a positive integer if specified')
      }
      this.initializeSizeTracking()
    }

    this.allowStale = !!allowStale || !!stale
    this.updateAgeOnGet = !!updateAgeOnGet
    this.updateAgeOnHas = !!updateAgeOnHas
    this.ttlResolution = isPosInt(ttlResolution) || ttlResolution === 0
      ? ttlResolution : 1
    this.ttlAutopurge = !!ttlAutopurge
    this.ttl = ttl || maxAge || 0
    if (this.ttl) {
      if (!isPosInt(this.ttl)) {
        throw new TypeError('ttl must be a positive integer if specified')
      }
      this.initializeTTLTracking()
    }

    // do not allow completely unbounded caches
    if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) {
      throw new TypeError('At least one of max, maxSize, or ttl is required')
    }
    if (!this.ttlAutopurge && !this.max && !this.maxSize) {
      const code = 'LRU_CACHE_UNBOUNDED'
      if (shouldWarn(code)) {
        warned.add(code)
        const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
          'result in unbounded memory consumption.'
        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)
      }
    }

    if (stale) {
      deprecatedOption('stale', 'allowStale')
    }
    if (maxAge) {
      deprecatedOption('maxAge', 'ttl')
    }
    if (length) {
      deprecatedOption('length', 'sizeCalculation')
    }
  }

  getRemainingTTL (key) {
    return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0
  }

  initializeTTLTracking () {
    this.ttls = new ZeroArray(this.max)
    this.starts = new ZeroArray(this.max)

    this.setItemTTL = (index, ttl) => {
      this.starts[index] = ttl !== 0 ? perf.now() : 0
      this.ttls[index] = ttl
      if (ttl !== 0 && this.ttlAutopurge) {
        const t = setTimeout(() => {
          if (this.isStale(index)) {
            this.delete(this.keyList[index])
          }
        }, ttl + 1)
        /* istanbul ignore else - unref() not supported on all platforms */
        if (t.unref) {
          t.unref()
        }
      }
    }

    this.updateItemAge = (index) => {
      this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0
    }

    // debounce calls to perf.now() to 1s so we're not hitting
    // that costly call repeatedly.
    let cachedNow = 0
    const getNow = () => {
      const n = perf.now()
      if (this.ttlResolution > 0) {
        cachedNow = n
        const t = setTimeout(() => cachedNow = 0, this.ttlResolution)
        /* istanbul ignore else - not available on all platforms */
        if (t.unref) {
          t.unref()
        }
      }
      return n
    }

    this.getRemainingTTL = (key) => {
      const index = this.keyMap.get(key)
      if (index === undefined) {
        return 0
      }
      return this.ttls[index] === 0 || this.starts[index] === 0 ? Infinity
        : ((this.starts[index] + this.ttls[index]) - (cachedNow || getNow()))
    }

    this.isStale = (index) => {
      return this.ttls[index] !== 0 && this.starts[index] !== 0 &&
        ((cachedNow || getNow()) - this.starts[index] > this.ttls[index])
    }
  }
  updateItemAge (index) {}
  setItemTTL (index, ttl) {}
  isStale (index) { return false }

  initializeSizeTracking () {
    this.calculatedSize = 0
    this.sizes = new ZeroArray(this.max)
    this.removeItemSize = index => this.calculatedSize -= this.sizes[index]
    this.requireSize = (k, v, size, sizeCalculation) => {
      if (!isPosInt(size)) {
        if (sizeCalculation) {
          if (typeof sizeCalculation !== 'function') {
            throw new TypeError('sizeCalculation must be a function')
          }
          size = sizeCalculation(v, k)
          if (!isPosInt(size)) {
            throw new TypeError('sizeCalculation return invalid (expect positive integer)')
          }
        } else {
          throw new TypeError('invalid size value (must be positive integer)')
        }
      }
      return size
    }
    this.addItemSize = (index, v, k, size) => {
      this.sizes[index] = size
      const maxSize = this.maxSize - this.sizes[index]
      while (this.calculatedSize > maxSize) {
        this.evict()
      }
      this.calculatedSize += this.sizes[index]
    }
    this.delete = k => {
      if (this.size !== 0) {
        const index = this.keyMap.get(k)
        if (index !== undefined) {
          this.calculatedSize -= this.sizes[index]
        }
      }
      return LRUCache.prototype.delete.call(this, k)
    }
  }
  removeItemSize (index) {}
  addItemSize (index, v, k, size) {}
  requireSize (k, v, size, sizeCalculation) {
    if (size || sizeCalculation) {
      throw new TypeError('cannot set size without setting maxSize on cache')
    }
  }

  *indexes ({ allowStale = this.allowStale } = {}) {
    if (this.size) {
      for (let i = this.tail; true; ) {
        if (!this.isValidIndex(i)) {
          break
        }
        if (allowStale || !this.isStale(i)) {
          yield i
        }
        if (i === this.head) {
          break
        } else {
          i = this.prev[i]
        }
      }
    }
  }

  *rindexes ({ allowStale = this.allowStale } = {}) {
    if (this.size) {
      for (let i = this.head; true; ) {
        if (!this.isValidIndex(i)) {
          break
        }
        if (allowStale || !this.isStale(i)) {
          yield i
        }
        if (i === this.tail) {
          break
        } else {
          i = this.next[i]
        }
      }
    }
  }

  isValidIndex (index) {
    return this.keyMap.get(this.keyList[index]) === index
  }

  *entries () {
    for (const i of this.indexes()) {
      yield [this.keyList[i], this.valList[i]]
    }
  }
  *rentries () {
    for (const i of this.rindexes()) {
      yield [this.keyList[i], this.valList[i]]
    }
  }

  *keys () {
    for (const i of this.indexes()) {
      yield this.keyList[i]
    }
  }
  *rkeys () {
    for (const i of this.rindexes()) {
      yield this.keyList[i]
    }
  }

  *values () {
    for (const i of this.indexes()) {
      yield this.valList[i]
    }
  }
  *rvalues () {
    for (const i of this.rindexes()) {
      yield this.valList[i]
    }
  }

  [Symbol.iterator] () {
    return this.entries()
  }

  find (fn, getOptions = {}) {
    for (const i of this.indexes()) {
      if (fn(this.valList[i], this.keyList[i], this)) {
        return this.get(this.keyList[i], getOptions)
      }
    }
  }

  forEach (fn, thisp = this) {
    for (const i of this.indexes()) {
      fn.call(thisp, this.valList[i], this.keyList[i], this)
    }
  }

  rforEach (fn, thisp = this) {
    for (const i of this.rindexes()) {
      fn.call(thisp, this.valList[i], this.keyList[i], this)
    }
  }

  get prune () {
    deprecatedMethod('prune', 'purgeStale')
    return this.purgeStale
  }

  purgeStale () {
    let deleted = false
    for (const i of this.rindexes({ allowStale: true })) {
      if (this.isStale(i)) {
        this.delete(this.keyList[i])
        deleted = true
      }
    }
    return deleted
  }

  dump () {
    const arr = []
    for (const i of this.indexes()) {
      const key = this.keyList[i]
      const value = this.valList[i]
      const entry = { value }
      if (this.ttls) {
        entry.ttl = this.ttls[i]
      }
      if (this.sizes) {
        entry.size = this.sizes[i]
      }
      arr.unshift([key, entry])
    }
    return arr
  }

  load (arr) {
    this.clear()
    for (const [key, entry] of arr) {
      this.set(key, entry.value, entry)
    }
  }

  dispose (v, k, reason) {}

  set (k, v, {
    ttl = this.ttl,
    noDisposeOnSet = this.noDisposeOnSet,
    size = 0,
    sizeCalculation = this.sizeCalculation,
    noUpdateTTL = this.noUpdateTTL,
  } = {}) {
    size = this.requireSize(k, v, size, sizeCalculation)
    let index = this.size === 0 ? undefined : this.keyMap.get(k)
    if (index === undefined) {
      // addition
      index = this.newIndex()
      this.keyList[index] = k
      this.valList[index] = v
      this.keyMap.set(k, index)
      this.next[this.tail] = index
      this.prev[index] = this.tail
      this.tail = index
      this.size ++
      this.addItemSize(index, v, k, size)
      noUpdateTTL = false
    } else {
      // update
      const oldVal = this.valList[index]
      if (v !== oldVal) {
        if (this.isBackgroundFetch(oldVal)) {
          oldVal.__abortController.abort()
        } else {
          if (!noDisposeOnSet) {
            this.dispose(oldVal, k, 'set')
            if (this.disposeAfter) {
              this.disposed.push([oldVal, k, 'set'])
            }
          }
        }
        this.removeItemSize(index)
        this.valList[index] = v
        this.addItemSize(index, v, k, size)
      }
      this.moveToTail(index)
    }
    if (ttl !== 0 && this.ttl === 0 && !this.ttls) {
      this.initializeTTLTracking()
    }
    if (!noUpdateTTL) {
      this.setItemTTL(index, ttl)
    }
    if (this.disposeAfter) {
      while (this.disposed.length) {
        this.disposeAfter(...this.disposed.shift())
      }
    }
    return this
  }

  newIndex () {
    if (this.size === 0) {
      return this.tail
    }
    if (this.size === this.max) {
      return this.evict()
    }
    if (this.free.length !== 0) {
      return this.free.pop()
    }
    // initial fill, just keep writing down the list
    return this.initialFill++
  }

  pop () {
    if (this.size) {
      const val = this.valList[this.head]
      this.evict()
      return val
    }
  }

  evict () {
    const head = this.head
    const k = this.keyList[head]
    const v = this.valList[head]
    if (this.isBackgroundFetch(v)) {
      v.__abortController.abort()
    } else {
      this.dispose(v, k, 'evict')
      if (this.disposeAfter) {
        this.disposed.push([v, k, 'evict'])
      }
    }
    this.removeItemSize(head)
    this.head = this.next[head]
    this.keyMap.delete(k)
    this.size --
    return head
  }

  has (k, { updateAgeOnHas = this.updateAgeOnHas } = {}) {
    const index = this.keyMap.get(k)
    if (index !== undefined) {
      if (!this.isStale(index)) {
        if (updateAgeOnHas) {
          this.updateItemAge(index)
        }
        return true
      }
    }
    return false
  }

  // like get(), but without any LRU updating or TTL expiration
  peek (k, { allowStale = this.allowStale } = {}) {
    const index = this.keyMap.get(k)
    if (index !== undefined && (allowStale || !this.isStale(index))) {
      return this.valList[index]
    }
  }

  backgroundFetch (k, index, options) {
    const v = index === undefined ? undefined : this.valList[index]
    if (this.isBackgroundFetch(v)) {
      return v
    }
    const ac = new AC()
    const fetchOpts = {
      signal: ac.signal,
      options,
    }
    const p = Promise.resolve(this.fetchMethod(k, v, fetchOpts)).then(v => {
      if (!ac.signal.aborted) {
        this.set(k, v, fetchOpts.options)
      }
      return v
    })
    p.__abortController = ac
    p.__staleWhileFetching = v
    if (index === undefined) {
      this.set(k, p, fetchOpts.options)
      index = this.keyMap.get(k)
    } else {
      this.valList[index] = p
    }
    return p
  }

  isBackgroundFetch (p) {
    return p && typeof p === 'object' && typeof p.then === 'function' &&
      Object.prototype.hasOwnProperty.call(p, '__staleWhileFetching')
  }

  // this takes the union of get() and set() opts, because it does both
  async fetch (k, {
    allowStale = this.allowStale,
    updateAgeOnGet = this.updateAgeOnGet,
    ttl = this.ttl,
    noDisposeOnSet = this.noDisposeOnSet,
    size = 0,
    sizeCalculation = this.sizeCalculation,
    noUpdateTTL = this.noUpdateTTL,
  } = {}) {
    if (!this.fetchMethod) {
      return this.get(k, {allowStale, updateAgeOnGet})
    }

    const options = {
      allowStale,
      updateAgeOnGet,
      ttl,
      noDisposeOnSet,
      size,
      sizeCalculation,
      noUpdateTTL,
    }

    let index = this.keyMap.get(k)
    if (index === undefined) {
      return this.backgroundFetch(k, index, options)
    } else {
      // in cache, maybe already fetching
      const v = this.valList[index]
      if (this.isBackgroundFetch(v)) {
        return allowStale && v.__staleWhileFetching !== undefined
          ? v.__staleWhileFetching : v
      }

      if (!this.isStale(index)) {
        this.moveToTail(index)
        if (updateAgeOnGet) {
          this.updateItemAge(index)
        }
        return v
      }

      // ok, it is stale, and not already fetching
      // refresh the cache.
      const p = this.backgroundFetch(k, index, options)
      return allowStale && p.__staleWhileFetching !== undefined
        ? p.__staleWhileFetching : p
    }
  }

  get (k, {
    allowStale = this.allowStale,
    updateAgeOnGet = this.updateAgeOnGet,
  } = {}) {
    const index = this.keyMap.get(k)
    if (index !== undefined) {
      const value = this.valList[index]
      const fetching = this.isBackgroundFetch(value)
      if (this.isStale(index)) {
        // delete only if not an in-flight background fetch
        if (!fetching) {
          this.delete(k)
          return allowStale ? value : undefined
        } else {
          return allowStale ? value.__staleWhileFetching : undefined
        }
      } else {
        // if we're currently fetching it, we don't actually have it yet
        // it's not stale, which means this isn't a staleWhileRefetching,
        // so we just return undefined
        if (fetching) {
          return undefined
        }
        this.moveToTail(index)
        if (updateAgeOnGet) {
          this.updateItemAge(index)
        }
        return value
      }
    }
  }

  connect (p, n) {
    this.prev[n] = p
    this.next[p] = n
  }

  moveToTail (index) {
    // if tail already, nothing to do
    // if head, move head to next[index]
    // else
    //   move next[prev[index]] to next[index] (head has no prev)
    //   move prev[next[index]] to prev[index]
    // prev[index] = tail
    // next[tail] = index
    // tail = index
    if (index !== this.tail) {
      if (index === this.head) {
        this.head = this.next[index]
      } else {
        this.connect(this.prev[index], this.next[index])
      }
      this.connect(this.tail, index)
      this.tail = index
    }
  }

  get del () {
    deprecatedMethod('del', 'delete')
    return this.delete
  }
  delete (k) {
    let deleted = false
    if (this.size !== 0) {
      const index = this.keyMap.get(k)
      if (index !== undefined) {
        deleted = true
        if (this.size === 1) {
          this.clear()
        } else {
          this.removeItemSize(index)
          const v = this.valList[index]
          if (this.isBackgroundFetch(v)) {
            v.__abortController.abort()
          } else {
            this.dispose(v, k, 'delete')
            if (this.disposeAfter) {
              this.disposed.push([v, k, 'delete'])
            }
          }
          this.keyMap.delete(k)
          this.keyList[index] = null
          this.valList[index] = null
          if (index === this.tail) {
            this.tail = this.prev[index]
          } else if (index === this.head) {
            this.head = this.next[index]
          } else {
            this.next[this.prev[index]] = this.next[index]
            this.prev[this.next[index]] = this.prev[index]
          }
          this.size --
          this.free.push(index)
        }
      }
    }
    if (this.disposed) {
      while (this.disposed.length) {
        this.disposeAfter(...this.disposed.shift())
      }
    }
    return deleted
  }

  clear () {
    for (const index of this.rindexes({ allowStale: true })) {
      const v = this.valList[index]
      if (this.isBackgroundFetch(v)) {
        v.__abortController.abort()
      } else {
        const k = this.keyList[index]
        this.dispose(v, k, 'delete')
        if (this.disposeAfter) {
          this.disposed.push([v, k, 'delete'])
        }
      }
    }

    this.keyMap.clear()
    this.valList.fill(null)
    this.keyList.fill(null)
    if (this.ttls) {
      this.ttls.fill(0)
      this.starts.fill(0)
    }
    if (this.sizes) {
      this.sizes.fill(0)
    }
    this.head = 0
    this.tail = 0
    this.initialFill = 1
    this.free.length = 0
    this.calculatedSize = 0
    this.size = 0
    if (this.disposed) {
      while (this.disposed.length) {
        this.disposeAfter(...this.disposed.shift())
      }
    }
  }
  get reset () {
    deprecatedMethod('reset', 'clear')
    return this.clear
  }

  get length () {
    deprecatedProperty('length', 'size')
    return this.size
  }
}

module.exports = LRUCache

receipeServer/frontend_old/node_modules/lru-cache/package.json (generated, vendored, new file, 44 lines)
@@ -0,0 +1,44 @@
{
  "name": "lru-cache",
  "description": "A cache object that deletes the least-recently-used items.",
  "version": "7.8.0",
  "author": "Isaac Z. Schlueter <i@izs.me>",
  "keywords": [
    "mru",
    "lru",
    "cache"
  ],
  "scripts": {
    "build": "",
    "test": "tap",
    "snap": "tap",
    "size": "size-limit",
    "preversion": "npm test",
    "postversion": "npm publish",
    "prepublishOnly": "git push origin --follow-tags"
  },
  "main": "index.js",
  "repository": "git://github.com/isaacs/node-lru-cache.git",
  "devDependencies": {
    "@size-limit/preset-small-lib": "^7.0.8",
    "benchmark": "^2.1.4",
    "clock-mock": "^1.0.4",
    "size-limit": "^7.0.8",
    "tap": "^15.1.6"
  },
  "license": "ISC",
  "files": [
    "index.js"
  ],
  "engines": {
    "node": ">=12"
  },
  "tap": {
    "coverage-map": "map.js"
  },
  "size-limit": [
    {
      "path": "./index.js"
    }
  ]
}