main functions fixes
16 desktop-operator/node_modules/cacache/LICENSE.md (generated, vendored, normal file)
@@ -0,0 +1,16 @@
ISC License

Copyright (c) npm, Inc.

Permission to use, copy, modify, and/or distribute this software for
any purpose with or without fee is hereby granted, provided that the
above copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
USE OR PERFORMANCE OF THIS SOFTWARE.
716 desktop-operator/node_modules/cacache/README.md (generated, vendored, normal file)
@@ -0,0 +1,716 @@
# cacache

[`cacache`](https://github.com/npm/cacache) is a Node.js library for managing
local key and content address caches. It's really fast, really good at
concurrency, and it will never give you corrupted data, even if cache files
get corrupted or manipulated.

On systems that support user and group settings on files, cacache will
match the `uid` and `gid` values to the folder where the cache lives, even
when running as `root`.

It was written to be used as [npm](https://npm.im)'s local cache, but can
just as easily be used on its own.

## Install

`$ npm install --save cacache`

## Table of Contents

* [Example](#example)
* [Features](#features)
* [Contributing](#contributing)
* [API](#api)
  * [Using localized APIs](#localized-api)
  * Reading
    * [`ls`](#ls)
    * [`ls.stream`](#ls-stream)
    * [`get`](#get-data)
    * [`get.stream`](#get-stream)
    * [`get.info`](#get-info)
    * [`get.hasContent`](#get-hasContent)
  * Writing
    * [`put`](#put-data)
    * [`put.stream`](#put-stream)
    * [`rm.all`](#rm-all)
    * [`rm.entry`](#rm-entry)
    * [`rm.content`](#rm-content)
    * [`index.compact`](#index-compact)
    * [`index.insert`](#index-insert)
  * Utilities
    * [`clearMemoized`](#clear-memoized)
    * [`tmp.mkdir`](#tmp-mkdir)
    * [`tmp.withTmp`](#with-tmp)
  * Integrity
    * [Subresource Integrity](#integrity)
    * [`verify`](#verify)
    * [`verify.lastRun`](#verify-last-run)

### Example

```javascript
const cacache = require('cacache')
const fs = require('fs')

const tarball = '/path/to/mytar.tgz'
const cachePath = '/tmp/my-toy-cache'
const key = 'my-unique-key-1234'

// Cache it! Use `cachePath` as the root of the content cache
cacache.put(cachePath, key, '10293801983029384').then(integrity => {
  console.log(`Saved content to ${cachePath}.`)
})

const destination = '/tmp/mytar.tgz'

// Copy the contents out of the cache and into their destination!
// But this time, use a stream instead!
cacache.get.stream(
  cachePath, key
).pipe(
  fs.createWriteStream(destination)
).on('finish', () => {
  console.log('done extracting!')
})

// The same thing, but skip the key index.
// (`integrityHash` is the integrity value that `put` resolved with above.)
cacache.get.byDigest(cachePath, integrityHash).then(data => {
  fs.writeFile(destination, data, err => {
    console.log('tarball data fetched based on its sha512sum and written out!')
  })
})
```
### Features

* Extraction by key or by content address (shasum, etc)
* [Subresource Integrity](#integrity) web standard support
* Multi-hash support - safely host sha1, sha512, etc, in a single cache
* Automatic content deduplication
* Fault tolerance (immune to corruption, partial writes, process races, etc)
* Consistency guarantees on read and write (full data verification)
* Lockless, high-concurrency cache access
* Streaming support
* Promise support
* Fast -- sub-millisecond reads and writes including verification
* Arbitrary metadata storage
* Garbage collection and additional offline verification
* Thorough test coverage
* There's probably a bloom filter in there somewhere. Those are cool, right? 🤔

### Contributing

The cacache team enthusiastically welcomes contributions and project participation! There's a bunch of things you can do if you want to contribute! The [Contributor Guide](CONTRIBUTING.md) has all the information you need for everything from reporting bugs to contributing entire new features. Please don't hesitate to jump in if you'd like to, or even ask us questions if something isn't clear.

All participants and maintainers in this project are expected to follow the [Code of Conduct](CODE_OF_CONDUCT.md), and just generally be excellent to each other.

Please refer to the [Changelog](CHANGELOG.md) for project history details, too.

Happy hacking!
### API

#### <a name="ls"></a> `> cacache.ls(cache) -> Promise<Object>`

Lists info for all entries currently in the cache as a single large object. Each
entry in the object will be keyed by the unique index key, with corresponding
[`get.info`](#get-info) objects as the values.

##### Example

```javascript
cacache.ls(cachePath).then(console.log)
// Output
{
  'my-thing': {
    key: 'my-thing',
    integrity: 'sha512-BaSe64/EnCoDED+HAsh==',
    path: '.testcache/content/deadbeef', // joined with `cachePath`
    time: 12345698490,
    size: 4023948,
    metadata: {
      name: 'blah',
      version: '1.2.3',
      description: 'this was once a package but now it is my-thing'
    }
  },
  'other-thing': {
    key: 'other-thing',
    integrity: 'sha1-ANothER+hasH=',
    path: '.testcache/content/bada55',
    time: 11992309289,
    size: 111112
  }
}
```

#### <a name="ls-stream"></a> `> cacache.ls.stream(cache) -> Readable`

Lists info for all entries currently in the cache, one entry at a time.

This works just like [`ls`](#ls), except [`get.info`](#get-info) entries are
returned as `'data'` events on the returned stream.

##### Example

```javascript
cacache.ls.stream(cachePath).on('data', console.log)
// Output
{
  key: 'my-thing',
  integrity: 'sha512-BaSe64HaSh',
  path: '.testcache/content/deadbeef', // joined with `cachePath`
  time: 12345698490,
  size: 13423,
  metadata: {
    name: 'blah',
    version: '1.2.3',
    description: 'this was once a package but now it is my-thing'
  }
}

{
  key: 'other-thing',
  integrity: 'whirlpool-WoWSoMuchSupport',
  path: '.testcache/content/bada55',
  time: 11992309289,
  size: 498023984029
}

{
  ...
}
```
#### <a name="get-data"></a> `> cacache.get(cache, key, [opts]) -> Promise({data, metadata, integrity})`
|
||||
|
||||
Returns an object with the cached data, digest, and metadata identified by
|
||||
`key`. The `data` property of this object will be a `Buffer` instance that
|
||||
presumably holds some data that means something to you. I'm sure you know what
|
||||
to do with it! cacache just won't care.
|
||||
|
||||
`integrity` is a [Subresource
|
||||
Integrity](#integrity)
|
||||
string. That is, a string that can be used to verify `data`, which looks like
|
||||
`<hash-algorithm>-<base64-integrity-hash>`.
|
||||
|
||||
If there is no content identified by `key`, or if the locally-stored data does
|
||||
not pass the validity checksum, the promise will be rejected.
|
||||
|
||||
A sub-function, `get.byDigest` may be used for identical behavior, except lookup
|
||||
will happen by integrity hash, bypassing the index entirely. This version of the
|
||||
function *only* returns `data` itself, without any wrapper.
|
||||
|
||||
See: [options](#get-options)
|
||||
|
||||
##### Note
|
||||
|
||||
This function loads the entire cache entry into memory before returning it. If
|
||||
you're dealing with Very Large data, consider using [`get.stream`](#get-stream)
|
||||
instead.
|
||||
|
||||
##### Example
|
||||
|
||||
```javascript
|
||||
// Look up by key
|
||||
cache.get(cachePath, 'my-thing').then(console.log)
|
||||
// Output:
|
||||
{
|
||||
metadata: {
|
||||
thingName: 'my'
|
||||
},
|
||||
integrity: 'sha512-BaSe64HaSh',
|
||||
data: Buffer#<deadbeef>,
|
||||
size: 9320
|
||||
}
|
||||
|
||||
// Look up by digest
|
||||
cache.get.byDigest(cachePath, 'sha512-BaSe64HaSh').then(console.log)
|
||||
// Output:
|
||||
Buffer#<deadbeef>
|
||||
```
|
||||
|
||||
#### <a name="get-stream"></a> `> cacache.get.stream(cache, key, [opts]) -> Readable`
|
||||
|
||||
Returns a [Readable Stream](https://nodejs.org/api/stream.html#stream_readable_streams) of the cached data identified by `key`.
|
||||
|
||||
If there is no content identified by `key`, or if the locally-stored data does
|
||||
not pass the validity checksum, an error will be emitted.
|
||||
|
||||
`metadata` and `integrity` events will be emitted before the stream closes, if
|
||||
you need to collect that extra data about the cached entry.
|
||||
|
||||
A sub-function, `get.stream.byDigest` may be used for identical behavior,
|
||||
except lookup will happen by integrity hash, bypassing the index entirely. This
|
||||
version does not emit the `metadata` and `integrity` events at all.
|
||||
|
||||
See: [options](#get-options)
|
||||
|
||||
##### Example
|
||||
|
||||
```javascript
|
||||
// Look up by key
|
||||
cache.get.stream(
|
||||
cachePath, 'my-thing'
|
||||
).on('metadata', metadata => {
|
||||
console.log('metadata:', metadata)
|
||||
}).on('integrity', integrity => {
|
||||
console.log('integrity:', integrity)
|
||||
}).pipe(
|
||||
fs.createWriteStream('./x.tgz')
|
||||
)
|
||||
// Outputs:
|
||||
metadata: { ... }
|
||||
integrity: 'sha512-SoMeDIGest+64=='
|
||||
|
||||
// Look up by digest
|
||||
cache.get.stream.byDigest(
|
||||
cachePath, 'sha512-SoMeDIGest+64=='
|
||||
).pipe(
|
||||
fs.createWriteStream('./x.tgz')
|
||||
)
|
||||
```
|
||||
|
||||
#### <a name="get-info"></a> `> cacache.get.info(cache, key) -> Promise`
|
||||
|
||||
Looks up `key` in the cache index, returning information about the entry if
|
||||
one exists.
|
||||
|
||||
##### Fields
|
||||
|
||||
* `key` - Key the entry was looked up under. Matches the `key` argument.
|
||||
* `integrity` - [Subresource Integrity hash](#integrity) for the content this entry refers to.
|
||||
* `path` - Filesystem path where content is stored, joined with `cache` argument.
|
||||
* `time` - Timestamp the entry was first added on.
|
||||
* `metadata` - User-assigned metadata associated with the entry/content.
|
||||
|
||||
##### Example
|
||||
|
||||
```javascript
|
||||
cacache.get.info(cachePath, 'my-thing').then(console.log)
|
||||
|
||||
// Output
|
||||
{
|
||||
key: 'my-thing',
|
||||
integrity: 'sha256-MUSTVERIFY+ALL/THINGS=='
|
||||
path: '.testcache/content/deadbeef',
|
||||
time: 12345698490,
|
||||
size: 849234,
|
||||
metadata: {
|
||||
name: 'blah',
|
||||
version: '1.2.3',
|
||||
description: 'this was once a package but now it is my-thing'
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### <a name="get-hasContent"></a> `> cacache.get.hasContent(cache, integrity) -> Promise`
|
||||
|
||||
Looks up a [Subresource Integrity hash](#integrity) in the cache. If content
|
||||
exists for this `integrity`, it will return an object, with the specific single integrity hash
|
||||
that was found in `sri` key, and the size of the found content as `size`. If no content exists for this integrity, it will return `false`.
|
||||
|
||||
##### Example
|
||||
|
||||
```javascript
|
||||
cacache.get.hasContent(cachePath, 'sha256-MUSTVERIFY+ALL/THINGS==').then(console.log)
|
||||
|
||||
// Output
|
||||
{
|
||||
sri: {
|
||||
source: 'sha256-MUSTVERIFY+ALL/THINGS==',
|
||||
algorithm: 'sha256',
|
||||
digest: 'MUSTVERIFY+ALL/THINGS==',
|
||||
options: []
|
||||
},
|
||||
size: 9001
|
||||
}
|
||||
|
||||
cacache.get.hasContent(cachePath, 'sha521-NOT+IN/CACHE==').then(console.log)
|
||||
|
||||
// Output
|
||||
false
|
||||
```
|
||||
|
||||
##### <a name="get-options"></a> Options
|
||||
|
||||
##### `opts.integrity`
|
||||
If present, the pre-calculated digest for the inserted content. If this option
|
||||
is provided and does not match the post-insertion digest, insertion will fail
|
||||
with an `EINTEGRITY` error.
|
||||
|
||||
##### `opts.memoize`
|
||||
|
||||
Default: null
|
||||
|
||||
If explicitly truthy, cacache will read from memory and memoize data on bulk read. If `false`, cacache will read from disk data. Reader functions by default read from in-memory cache.
|
||||
|
||||
##### `opts.size`
|
||||
If provided, the data stream will be verified to check that enough data was
|
||||
passed through. If there's more or less data than expected, insertion will fail
|
||||
with an `EBADSIZE` error.
|
||||
|
||||
|
||||
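For instance, a read that skips the memoization layer and enforces an expected
size might look like this (a minimal sketch; the key and size are made up):

```javascript
cacache.get(cachePath, 'my-thing', { memoize: false, size: 9320 }).then(res => {
  // res.data was read from disk and verified to be exactly 9320 bytes
  console.log(`got ${res.data.length} bytes for my-thing`)
})
```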
#### <a name="put-data"></a> `> cacache.put(cache, key, data, [opts]) -> Promise`
|
||||
|
||||
Inserts data passed to it into the cache. The returned Promise resolves with a
|
||||
digest (generated according to [`opts.algorithms`](#optsalgorithms)) after the
|
||||
cache entry has been successfully written.
|
||||
|
||||
See: [options](#put-options)
|
||||
|
||||
##### Example
|
||||
|
||||
```javascript
|
||||
fetch(
|
||||
'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
|
||||
).then(data => {
|
||||
return cacache.put(cachePath, 'registry.npmjs.org|cacache@1.0.0', data)
|
||||
}).then(integrity => {
|
||||
console.log('integrity hash is', integrity)
|
||||
})
|
||||
```
|
||||
|
||||
#### <a name="put-stream"></a> `> cacache.put.stream(cache, key, [opts]) -> Writable`
|
||||
|
||||
Returns a [Writable
|
||||
Stream](https://nodejs.org/api/stream.html#stream_writable_streams) that inserts
|
||||
data written to it into the cache. Emits an `integrity` event with the digest of
|
||||
written contents when it succeeds.
|
||||
|
||||
See: [options](#put-options)
|
||||
|
||||
##### Example
|
||||
|
||||
```javascript
|
||||
request.get(
|
||||
'https://registry.npmjs.org/cacache/-/cacache-1.0.0.tgz'
|
||||
).pipe(
|
||||
cacache.put.stream(
|
||||
cachePath, 'registry.npmjs.org|cacache@1.0.0'
|
||||
).on('integrity', d => console.log(`integrity digest is ${d}`))
|
||||
)
|
||||
```
|
||||
|
||||
##### <a name="put-options"></a> Options
|
||||
|
||||
##### `opts.metadata`
|
||||
|
||||
Arbitrary metadata to be attached to the inserted key.
|
||||
|
||||
##### `opts.size`
|
||||
|
||||
If provided, the data stream will be verified to check that enough data was
|
||||
passed through. If there's more or less data than expected, insertion will fail
|
||||
with an `EBADSIZE` error.
|
||||
|
||||
##### `opts.integrity`
|
||||
|
||||
If present, the pre-calculated digest for the inserted content. If this option
|
||||
is provided and does not match the post-insertion digest, insertion will fail
|
||||
with an `EINTEGRITY` error.
|
||||
|
||||
`algorithms` has no effect if this option is present.
|
||||
|
||||
##### `opts.integrityEmitter`
|
||||
|
||||
*Streaming only* If present, uses the provided event emitter as a source of
|
||||
truth for both integrity and size. This allows use cases where integrity is
|
||||
already being calculated outside of cacache to reuse that data instead of
|
||||
calculating it a second time.
|
||||
|
||||
The emitter must emit both the `'integrity'` and `'size'` events.
|
||||
|
||||
NOTE: If this option is provided, you must verify that you receive the correct
|
||||
integrity value yourself and emit an `'error'` event if there is a mismatch.
|
||||
[ssri Integrity Streams](https://github.com/npm/ssri#integrity-stream) do this for you when given an expected integrity.
|
||||
|
||||
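A sketch of how this might be wired up, assuming the data already flows through
an [`ssri`](https://npm.im/ssri) integrity stream for other reasons (`source`,
`expectedIntegrity`, and the key are placeholders):

```javascript
const ssri = require('ssri')

// The integrity stream emits 'integrity' and 'size' itself, and errors on a
// mismatch with `expectedIntegrity`, so it can serve as the source of truth.
const integrityStream = ssri.integrityStream({ integrity: expectedIntegrity })

source.pipe(integrityStream).pipe(
  cacache.put.stream(cachePath, 'my-key', { integrityEmitter: integrityStream })
)
```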
##### `opts.algorithms`

Default: ['sha512']

Hashing algorithms to use when calculating the [subresource integrity
digest](#integrity)
for inserted data. Can use any algorithm listed in `crypto.getHashes()` or
`'omakase'`/`'お任せします'` to pick a random hash algorithm on each insertion. You
may also use any anagram of `'modnar'` to use this feature.

Currently only supports one algorithm at a time (i.e., an array length of
exactly `1`). Has no effect if `opts.integrity` is present.
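For example, to store content under a sha256 digest instead of the default
(a minimal sketch; the key and data are made up):

```javascript
cacache.put(cachePath, 'my-sha256-thing', data, { algorithms: ['sha256'] })
  .then(integrity => {
    // the digest stringifies to 'sha256-<base64 hash>'
    console.log('stored as', integrity.toString())
  })
```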
##### `opts.memoize`

Default: null

If provided, cacache will memoize the given cache insertion in memory, bypassing
any filesystem checks for that key or digest in future cache fetches. Nothing
will be written to the in-memory cache unless this option is explicitly truthy.

If `opts.memoize` is an object or a `Map`-like (that is, an object with `get`
and `set` methods), it will be written to instead of the global memoization
cache.

Reading from disk can be forced by explicitly passing `memoize: false` to
the reader functions, but their default will be to read from memory.
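A custom memoization target can be handy in tests. A minimal sketch (the `Map`
here is ordinary scratch space supplied by the caller, not part of cacache):

```javascript
const myMemo = new Map()

cacache.put(cachePath, 'my-thing', data, { memoize: myMemo }).then(() => {
  // the entry was memoized into myMemo instead of the global in-memory cache
  console.log('memoized keys:', [...myMemo.keys()])
})
```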
##### `opts.tmpPrefix`

Default: null

Prefix to append on the temporary directory name inside the cache's tmp dir.

#### <a name="rm-all"></a> `> cacache.rm.all(cache) -> Promise`

Clears the entire cache, mainly by blowing away the cache directory itself.

##### Example

```javascript
cacache.rm.all(cachePath).then(() => {
  console.log('THE APOCALYPSE IS UPON US 😱')
})
```

#### <a name="rm-entry"></a> `> cacache.rm.entry(cache, key, [opts]) -> Promise`

Alias: `cacache.rm`

Removes the index entry for `key`. Content will still be accessible if
requested directly by content address ([`get.stream.byDigest`](#get-stream)).

By default, this appends a new entry to the index with an integrity of `null`.
If `opts.removeFully` is set to `true` then the index file itself will be
physically deleted rather than having a `null` entry appended.

To remove the content itself (which might still be used by other entries), use
[`rm.content`](#rm-content). Or, to safely vacuum any unused content, use
[`verify`](#verify).

##### Example

```javascript
cacache.rm.entry(cachePath, 'my-thing').then(() => {
  console.log('I did not like it anyway')
})
```

#### <a name="rm-content"></a> `> cacache.rm.content(cache, integrity) -> Promise`

Removes the content identified by `integrity`. Any index entries referring to it
will not be usable again until the content is re-added to the cache with an
identical digest.

##### Example

```javascript
cacache.rm.content(cachePath, 'sha512-SoMeDIGest/IN+BaSE64==').then(() => {
  console.log('data for my-thing is gone!')
})
```

#### <a name="index-compact"></a> `> cacache.index.compact(cache, key, matchFn, [opts]) -> Promise`

Uses `matchFn`, which must be a synchronous function that accepts two entries
and returns a boolean indicating whether or not the two entries match, to
deduplicate all entries in the cache for the given `key`.

If `opts.validateEntry` is provided, it will be called as a function with the
only parameter being a single index entry. The function must return a Boolean:
if it returns `true`, the entry is considered valid and will be kept in the index;
if it returns `false`, the entry will be removed from the index.

If `opts.validateEntry` is not provided, however, every entry in the index will
be deduplicated and kept until the first `null` integrity is reached, removing
all entries that were written before the `null`.

The deduplicated list of entries is both written to the index, replacing the
existing content, and returned in the Promise.
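A minimal sketch of a call (the key and the match criterion are made up):

```javascript
// Treat two index entries as duplicates when they point at the same content.
const matchFn = (a, b) => a.integrity === b.integrity

cacache.index.compact(cachePath, 'my-thing', matchFn).then(entries => {
  console.log(`index for my-thing now holds ${entries.length} entries`)
})
```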
#### <a name="index-insert"></a> `> cacache.index.insert(cache, key, integrity, opts) -> Promise`
|
||||
|
||||
Writes an index entry to the cache for the given `key` without writing content.
|
||||
|
||||
It is assumed if you are using this method, you have already stored the content
|
||||
some other way and you only wish to add a new index to that content. The `metadata`
|
||||
and `size` properties are read from `opts` and used as part of the index entry.
|
||||
|
||||
Returns a Promise resolving to the newly added entry.
|
||||
|
||||
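For instance (a sketch; it assumes the content for this integrity is already
present in the cache, and the key and metadata are made up):

```javascript
cacache.index.insert(cachePath, 'my-alias', 'sha512-BaSe64HaSh', {
  size: 1024,
  metadata: { note: 'second key for existing content' }
}).then(entry => {
  console.log('added index entry for', entry.key)
})
```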
#### <a name="clear-memoized"></a> `> cacache.clearMemoized()`
|
||||
|
||||
Completely resets the in-memory entry cache.
|
||||
|
||||
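For example (a sketch; the key and data are made up):

```javascript
cacache.put(cachePath, 'my-thing', data, { memoize: true }).then(() => {
  cacache.clearMemoized()
  // subsequent reads of 'my-thing' will hit the filesystem again
})
```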
#### <a name="tmp-mkdir"></a> `> tmp.mkdir(cache, opts) -> Promise<Path>`
|
||||
|
||||
Returns a unique temporary directory inside the cache's `tmp` dir. This
|
||||
directory will use the same safe user assignment that all the other stuff use.
|
||||
|
||||
Once the directory is made, it's the user's responsibility that all files
|
||||
within are given the appropriate `gid`/`uid` ownership settings to match
|
||||
the rest of the cache. If not, you can ask cacache to do it for you by
|
||||
calling [`tmp.fix()`](#tmp-fix), which will fix all tmp directory
|
||||
permissions.
|
||||
|
||||
If you want automatic cleanup of this directory, use
|
||||
[`tmp.withTmp()`](#with-tpm)
|
||||
|
||||
See: [options](#tmp-options)
|
||||
|
||||
##### Example
|
||||
|
||||
```javascript
|
||||
cacache.tmp.mkdir(cache).then(dir => {
|
||||
fs.writeFile(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
|
||||
})
|
||||
```
|
||||
|
||||
#### <a name="tmp-fix"></a> `> tmp.fix(cache) -> Promise`
|
||||
|
||||
Sets the `uid` and `gid` properties on all files and folders within the tmp
|
||||
folder to match the rest of the cache.
|
||||
|
||||
Use this after manually writing files into [`tmp.mkdir`](#tmp-mkdir) or
|
||||
[`tmp.withTmp`](#with-tmp).
|
||||
|
||||
##### Example
|
||||
|
||||
```javascript
|
||||
cacache.tmp.mkdir(cache).then(dir => {
|
||||
writeFile(path.join(dir, 'file'), someData).then(() => {
|
||||
// make sure we didn't just put a root-owned file in the cache
|
||||
cacache.tmp.fix().then(() => {
|
||||
// all uids and gids match now
|
||||
})
|
||||
})
|
||||
})
|
||||
```
|
||||
|
||||
#### <a name="with-tmp"></a> `> tmp.withTmp(cache, opts, cb) -> Promise`
|
||||
|
||||
Creates a temporary directory with [`tmp.mkdir()`](#tmp-mkdir) and calls `cb`
|
||||
with it. The created temporary directory will be removed when the return value
|
||||
of `cb()` resolves, the tmp directory will be automatically deleted once that
|
||||
promise completes.
|
||||
|
||||
The same caveats apply when it comes to managing permissions for the tmp dir's
|
||||
contents.
|
||||
|
||||
See: [options](#tmp-options)
|
||||
|
||||
##### Example
|
||||
|
||||
```javascript
|
||||
cacache.tmp.withTmp(cache, dir => {
|
||||
return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
|
||||
}).then(() => {
|
||||
// `dir` no longer exists
|
||||
})
|
||||
```
|
||||
|
||||
##### <a name="tmp-options"></a> Options
|
||||
|
||||
##### `opts.tmpPrefix`
|
||||
Default: null
|
||||
|
||||
Prefix to append on the temporary directory name inside the cache's tmp dir.
|
||||
|
||||
#### <a name="integrity"></a> Subresource Integrity Digests
|
||||
|
||||
For content verification and addressing, cacache uses strings following the
|
||||
[Subresource
|
||||
Integrity spec](https://developer.mozilla.org/en-US/docs/Web/Security/Subresource_Integrity).
|
||||
That is, any time cacache expects an `integrity` argument or option, it
|
||||
should be in the format `<hashAlgorithm>-<base64-hash>`.
|
||||
|
||||
One deviation from the current spec is that cacache will support any hash
|
||||
algorithms supported by the underlying Node.js process. You can use
|
||||
`crypto.getHashes()` to see which ones you can use.
|
||||
|
||||
##### Generating Digests Yourself
|
||||
|
||||
If you have an existing content shasum, they are generally formatted as a
|
||||
hexadecimal string (that is, a sha1 would look like:
|
||||
`5f5513f8822fdbe5145af33b64d8d970dcf95c6e`). In order to be compatible with
|
||||
cacache, you'll need to convert this to an equivalent subresource integrity
|
||||
string. For this example, the corresponding hash would be:
|
||||
`sha1-X1UT+IIv2+UUWvM7ZNjZcNz5XG4=`.
|
||||
|
||||
If you want to generate an integrity string yourself for existing data, you can
|
||||
use something like this:
|
||||
|
||||
```javascript
|
||||
const crypto = require('crypto')
|
||||
const hashAlgorithm = 'sha512'
|
||||
const data = 'foobarbaz'
|
||||
|
||||
const integrity = (
|
||||
hashAlgorithm +
|
||||
'-' +
|
||||
crypto.createHash(hashAlgorithm).update(data).digest('base64')
|
||||
)
|
||||
```
|
||||
|
||||
You can also use [`ssri`](https://npm.im/ssri) to have a richer set of functionality
|
||||
around SRI strings, including generation, parsing, and translating from existing
|
||||
hex-formatted strings.
|
||||
|
||||
#### <a name="verify"></a> `> cacache.verify(cache, opts) -> Promise`
|
||||
|
||||
Checks out and fixes up your cache:
|
||||
|
||||
* Cleans up corrupted or invalid index entries.
|
||||
* Custom entry filtering options.
|
||||
* Garbage collects any content entries not referenced by the index.
|
||||
* Checks integrity for all content entries and removes invalid content.
|
||||
* Fixes cache ownership.
|
||||
* Removes the `tmp` directory in the cache and all its contents.
|
||||
|
||||
When it's done, it'll return an object with various stats about the verification
|
||||
process, including amount of storage reclaimed, number of valid entries, number
|
||||
of entries removed, etc.
|
||||
|
||||
##### <a name="verify-options"></a> Options
|
||||
|
||||
##### `opts.concurrency`
|
||||
|
||||
Default: 20
|
||||
|
||||
Number of concurrently read files in the filesystem while doing clean up.
|
||||
|
||||
##### `opts.filter`
|
||||
Receives a formatted entry. Return false to remove it.
|
||||
Note: might be called more than once on the same entry.
|
||||
|
||||
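A sketch of a filtered verification run (the key prefix here is made up):

```javascript
cacache.verify(cachePath, {
  // keep only entries whose keys belong to the npm registry
  filter: (entry) => entry.key.startsWith('registry.npmjs.org|')
}).then(stats => {
  console.log('verify stats:', stats)
})
```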
##### `opts.log`

Custom logger function:

```
log: { silly () {} }
log.silly('verify', 'verifying cache at', cache)
```

##### Example

```sh
echo somegarbage >> $CACHEPATH/content/deadbeef
```

```javascript
cacache.verify(cachePath).then(stats => {
  // deadbeef collected, because of invalid checksum.
  console.log('cache is much nicer now! stats:', stats)
})
```

#### <a name="verify-last-run"></a> `> cacache.verify.lastRun(cache) -> Promise`

Returns a `Date` representing the last time `cacache.verify` was run on `cache`.

##### Example

```javascript
cacache.verify(cachePath).then(() => {
  cacache.verify.lastRun(cachePath).then(lastTime => {
    console.log('cacache.verify was last called on ' + lastTime)
  })
})
```
29 desktop-operator/node_modules/cacache/lib/content/path.js (generated, vendored, normal file)
@@ -0,0 +1,29 @@
'use strict'

const contentVer = require('../../package.json')['cache-version'].content
const hashToSegments = require('../util/hash-to-segments')
const path = require('path')
const ssri = require('ssri')

// Current format of content file path:
//
// sha512-BaSE64Hex= ->
// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
//
module.exports = contentPath

function contentPath (cache, integrity) {
  const sri = ssri.parse(integrity, { single: true })
  // contentPath is the *strongest* algo given
  return path.join(
    contentDir(cache),
    sri.algorithm,
    ...hashToSegments(sri.hexDigest())
  )
}

module.exports.contentDir = contentDir

function contentDir (cache) {
  return path.join(cache, `content-v${contentVer}`)
}
241 desktop-operator/node_modules/cacache/lib/content/read.js (generated, vendored, normal file)
@@ -0,0 +1,241 @@
'use strict'

const fs = require('@npmcli/fs')
const fsm = require('fs-minipass')
const ssri = require('ssri')
const contentPath = require('./path')
const Pipeline = require('minipass-pipeline')

module.exports = read

const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
async function read (cache, integrity, opts = {}) {
  const { size } = opts
  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
    // get size
    const stat = await fs.stat(cpath)
    return { stat, cpath, sri }
  })
  if (typeof size === 'number' && stat.size !== size) {
    throw sizeError(size, stat.size)
  }

  if (stat.size > MAX_SINGLE_READ_SIZE) {
    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
  }

  const data = await fs.readFile(cpath, { encoding: null })
  if (!ssri.checkData(data, sri)) {
    throw integrityError(sri, cpath)
  }

  return data
}

const readPipeline = (cpath, size, sri, stream) => {
  stream.push(
    new fsm.ReadStream(cpath, {
      size,
      readSize: MAX_SINGLE_READ_SIZE,
    }),
    ssri.integrityStream({
      integrity: sri,
      size,
    })
  )
  return stream
}

module.exports.sync = readSync

function readSync (cache, integrity, opts = {}) {
  const { size } = opts
  return withContentSriSync(cache, integrity, (cpath, sri) => {
    const data = fs.readFileSync(cpath, { encoding: null })
    if (typeof size === 'number' && size !== data.length) {
      throw sizeError(size, data.length)
    }

    if (ssri.checkData(data, sri)) {
      return data
    }

    throw integrityError(sri, cpath)
  })
}

module.exports.stream = readStream
module.exports.readStream = readStream

function readStream (cache, integrity, opts = {}) {
  const { size } = opts
  const stream = new Pipeline()
  // Set all this up to run on the stream and then just return the stream
  Promise.resolve().then(async () => {
    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
      // just stat to ensure it exists
      const stat = await fs.stat(cpath)
      return { stat, cpath, sri }
    })
    if (typeof size === 'number' && size !== stat.size) {
      return stream.emit('error', sizeError(size, stat.size))
    }

    return readPipeline(cpath, stat.size, sri, stream)
  }).catch(err => stream.emit('error', err))

  return stream
}

module.exports.copy = copy
module.exports.copy.sync = copySync

function copy (cache, integrity, dest) {
  return withContentSri(cache, integrity, (cpath, sri) => {
    return fs.copyFile(cpath, dest)
  })
}

function copySync (cache, integrity, dest) {
  return withContentSriSync(cache, integrity, (cpath, sri) => {
    return fs.copyFileSync(cpath, dest)
  })
}

module.exports.hasContent = hasContent

async function hasContent (cache, integrity) {
  if (!integrity) {
    return false
  }

  try {
    return await withContentSri(cache, integrity, async (cpath, sri) => {
      const stat = await fs.stat(cpath)
      return { size: stat.size, sri, stat }
    })
  } catch (err) {
    if (err.code === 'ENOENT') {
      return false
    }

    if (err.code === 'EPERM') {
      /* istanbul ignore else */
      if (process.platform !== 'win32') {
        throw err
      } else {
        return false
      }
    }
  }
}

module.exports.hasContent.sync = hasContentSync

function hasContentSync (cache, integrity) {
  if (!integrity) {
    return false
  }

  return withContentSriSync(cache, integrity, (cpath, sri) => {
    try {
      const stat = fs.statSync(cpath)
      return { size: stat.size, sri, stat }
    } catch (err) {
      if (err.code === 'ENOENT') {
        return false
      }

      if (err.code === 'EPERM') {
        /* istanbul ignore else */
        if (process.platform !== 'win32') {
          throw err
        } else {
          return false
        }
      }
    }
  })
}

async function withContentSri (cache, integrity, fn) {
  const sri = ssri.parse(integrity)
  // If `integrity` has multiple entries, pick the first digest
  // with available local data.
  const algo = sri.pickAlgorithm()
  const digests = sri[algo]

  if (digests.length <= 1) {
    const cpath = contentPath(cache, digests[0])
    return fn(cpath, digests[0])
  } else {
    // Can't use race here because a generic error can happen before
    // a ENOENT error, and can happen before a valid result
    const results = await Promise.all(digests.map(async (meta) => {
      try {
        return await withContentSri(cache, meta, fn)
      } catch (err) {
        if (err.code === 'ENOENT') {
          return Object.assign(
            new Error('No matching content found for ' + sri.toString()),
            { code: 'ENOENT' }
          )
        }
        return err
      }
    }))
    // Return the first non error if it is found
    const result = results.find((r) => !(r instanceof Error))
    if (result) {
      return result
    }

    // Throw the No matching content found error
    const enoentError = results.find((r) => r.code === 'ENOENT')
    if (enoentError) {
      throw enoentError
    }

    // Throw generic error
    throw results.find((r) => r instanceof Error)
  }
}

function withContentSriSync (cache, integrity, fn) {
  const sri = ssri.parse(integrity)
  // If `integrity` has multiple entries, pick the first digest
  // with available local data.
  const algo = sri.pickAlgorithm()
  const digests = sri[algo]
  if (digests.length <= 1) {
    const cpath = contentPath(cache, digests[0])
    return fn(cpath, digests[0])
  } else {
    let lastErr = null
    for (const meta of digests) {
      try {
        return withContentSriSync(cache, meta, fn)
      } catch (err) {
        lastErr = err
      }
    }
    throw lastErr
  }
}

function sizeError (expected, found) {
  /* eslint-disable-next-line max-len */
  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
  err.expected = expected
  err.found = found
  err.code = 'EBADSIZE'
  return err
}

function integrityError (sri, path) {
  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
  err.code = 'EINTEGRITY'
  err.sri = sri
  err.path = path
  return err
}
20 desktop-operator/node_modules/cacache/lib/content/rm.js (generated, vendored, normal file)
@@ -0,0 +1,20 @@
'use strict'

const util = require('util')

const contentPath = require('./path')
const { hasContent } = require('./read')
const rimraf = util.promisify(require('rimraf'))

module.exports = rm

async function rm (cache, integrity) {
  const content = await hasContent(cache, integrity)
  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
  if (content && content.sri) {
    await rimraf(contentPath(cache, content.sri))
    return true
  } else {
    return false
  }
}
189 desktop-operator/node_modules/cacache/lib/content/write.js (generated, vendored, normal file)
@@ -0,0 +1,189 @@
'use strict'

const events = require('events')
const util = require('util')

const contentPath = require('./path')
const fixOwner = require('../util/fix-owner')
const fs = require('@npmcli/fs')
const moveFile = require('../util/move-file')
const Minipass = require('minipass')
const Pipeline = require('minipass-pipeline')
const Flush = require('minipass-flush')
const path = require('path')
const rimraf = util.promisify(require('rimraf'))
const ssri = require('ssri')
const uniqueFilename = require('unique-filename')
const fsm = require('fs-minipass')

module.exports = write

async function write (cache, data, opts = {}) {
  const { algorithms, size, integrity } = opts
  if (algorithms && algorithms.length > 1) {
    throw new Error('opts.algorithms only supports a single algorithm for now')
  }

  if (typeof size === 'number' && data.length !== size) {
    throw sizeError(size, data.length)
  }

  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
  if (integrity && !ssri.checkData(data, integrity, opts)) {
    throw checksumError(integrity, sri)
  }

  const tmp = await makeTmp(cache, opts)
  try {
    await fs.writeFile(tmp.target, data, { flag: 'wx' })
    await moveToDestination(tmp, cache, sri, opts)
    return { integrity: sri, size: data.length }
  } finally {
    if (!tmp.moved) {
      await rimraf(tmp.target)
    }
  }
}

module.exports.stream = writeStream

// writes proxied to the 'inputStream' that is passed to the Promise
// 'end' is deferred until content is handled.
class CacacheWriteStream extends Flush {
  constructor (cache, opts) {
    super()
    this.opts = opts
    this.cache = cache
    this.inputStream = new Minipass()
    this.inputStream.on('error', er => this.emit('error', er))
    this.inputStream.on('drain', () => this.emit('drain'))
    this.handleContentP = null
  }

  write (chunk, encoding, cb) {
    if (!this.handleContentP) {
      this.handleContentP = handleContent(
        this.inputStream,
        this.cache,
        this.opts
      )
    }
    return this.inputStream.write(chunk, encoding, cb)
  }

  flush (cb) {
    this.inputStream.end(() => {
      if (!this.handleContentP) {
        const e = new Error('Cache input stream was empty')
        e.code = 'ENODATA'
        // empty streams are probably emitting end right away.
        // defer this one tick by rejecting a promise on it.
        return Promise.reject(e).catch(cb)
      }
      // eslint-disable-next-line promise/catch-or-return
      this.handleContentP.then(
        (res) => {
          res.integrity && this.emit('integrity', res.integrity)
          // eslint-disable-next-line promise/always-return
          res.size !== null && this.emit('size', res.size)
          cb()
        },
        (er) => cb(er)
      )
    })
  }
}

function writeStream (cache, opts = {}) {
  return new CacacheWriteStream(cache, opts)
}

async function handleContent (inputStream, cache, opts) {
  const tmp = await makeTmp(cache, opts)
  try {
    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
    await moveToDestination(
      tmp,
      cache,
      res.integrity,
      opts
    )
    return res
  } finally {
    if (!tmp.moved) {
      await rimraf(tmp.target)
    }
  }
}

async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
  const outStream = new fsm.WriteStream(tmpTarget, {
    flags: 'wx',
  })

  if (opts.integrityEmitter) {
    // we need to create these all simultaneously since they can fire in any order
    const [integrity, size] = await Promise.all([
      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
      new Pipeline(inputStream, outStream).promise(),
    ])
    return { integrity, size }
  }

  let integrity
  let size
  const hashStream = ssri.integrityStream({
    integrity: opts.integrity,
    algorithms: opts.algorithms,
    size: opts.size,
  })
  hashStream.on('integrity', i => {
    integrity = i
  })
  hashStream.on('size', s => {
    size = s
  })

  const pipeline = new Pipeline(inputStream, hashStream, outStream)
  await pipeline.promise()
  return { integrity, size }
}

async function makeTmp (cache, opts) {
  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
  await fixOwner.mkdirfix(cache, path.dirname(tmpTarget))
  return {
    target: tmpTarget,
    moved: false,
  }
}

async function moveToDestination (tmp, cache, sri, opts) {
  const destination = contentPath(cache, sri)
  const destDir = path.dirname(destination)

  await fixOwner.mkdirfix(cache, destDir)
  await moveFile(tmp.target, destination)
  tmp.moved = true
  await fixOwner.chownr(cache, destination)
}

function sizeError (expected, found) {
  /* eslint-disable-next-line max-len */
  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
  err.expected = expected
  err.found = found
  err.code = 'EBADSIZE'
  return err
}

function checksumError (expected, found) {
  const err = new Error(`Integrity check failed:
  Wanted: ${expected}
   Found: ${found}`)
  err.code = 'EINTEGRITY'
  err.expected = expected
  err.found = found
  return err
}
404 desktop-operator/node_modules/cacache/lib/entry-index.js (generated, vendored, normal file)
@@ -0,0 +1,404 @@
'use strict'

const util = require('util')
const crypto = require('crypto')
const fs = require('@npmcli/fs')
const Minipass = require('minipass')
const path = require('path')
const ssri = require('ssri')
const uniqueFilename = require('unique-filename')

const contentPath = require('./content/path')
const fixOwner = require('./util/fix-owner')
const hashToSegments = require('./util/hash-to-segments')
const indexV = require('../package.json')['cache-version'].index
const moveFile = require('@npmcli/move-file')
const _rimraf = require('rimraf')
const rimraf = util.promisify(_rimraf)
rimraf.sync = _rimraf.sync

module.exports.NotFoundError = class NotFoundError extends Error {
  constructor (cache, key) {
    super(`No cache entry for ${key} found in ${cache}`)
    this.code = 'ENOENT'
    this.cache = cache
    this.key = key
  }
}

module.exports.compact = compact

async function compact (cache, key, matchFn, opts = {}) {
  const bucket = bucketPath(cache, key)
  const entries = await bucketEntries(bucket)
  const newEntries = []
  // we loop backwards because the bottom-most result is the newest
  // since we add new entries with appendFile
  for (let i = entries.length - 1; i >= 0; --i) {
    const entry = entries[i]
    // a null integrity could mean either a delete was appended
    // or the user has simply stored an index that does not map
    // to any content. we determine if the user wants to keep the
    // null integrity based on the validateEntry function passed in options.
    // if the integrity is null and no validateEntry is provided, we break
    // as we consider the null integrity to be a deletion of everything
    // that came before it.
    if (entry.integrity === null && !opts.validateEntry) {
      break
    }

    // if this entry is valid, and it is either the first entry or
    // the newEntries array doesn't already include an entry that
    // matches this one based on the provided matchFn, then we add
    // it to the beginning of our list
    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
      (newEntries.length === 0 ||
        !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
      newEntries.unshift(entry)
    }
  }

  const newIndex = '\n' + newEntries.map((entry) => {
    const stringified = JSON.stringify(entry)
    const hash = hashEntry(stringified)
    return `${hash}\t${stringified}`
  }).join('\n')

  const setup = async () => {
    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
    await fixOwner.mkdirfix(cache, path.dirname(target))
    return {
      target,
      moved: false,
    }
  }

  const teardown = async (tmp) => {
    if (!tmp.moved) {
      return rimraf(tmp.target)
    }
  }

  const write = async (tmp) => {
    await fs.writeFile(tmp.target, newIndex, { flag: 'wx' })
    await fixOwner.mkdirfix(cache, path.dirname(bucket))
    // we use @npmcli/move-file directly here because we
    // want to overwrite the existing file
    await moveFile(tmp.target, bucket)
    tmp.moved = true
    try {
      await fixOwner.chownr(cache, bucket)
    } catch (err) {
      if (err.code !== 'ENOENT') {
        throw err
      }
    }
  }

  // write the file atomically
  const tmp = await setup()
  try {
    await write(tmp)
  } finally {
    await teardown(tmp)
  }

  // we reverse the list we generated such that the newest
  // entries come first in order to make looping through them easier
  // the true passed to formatEntry tells it to keep null
  // integrity values, if they made it this far it's because
  // validateEntry returned true, and as such we should return it
  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
}

module.exports.insert = insert

async function insert (cache, key, integrity, opts = {}) {
  const { metadata, size } = opts
  const bucket = bucketPath(cache, key)
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: Date.now(),
    size,
    metadata,
  }
  try {
    await fixOwner.mkdirfix(cache, path.dirname(bucket))
    const stringified = JSON.stringify(entry)
    // NOTE - Cleverness ahoy!
    //
    // This works because it's tremendously unlikely for an entry to corrupt
    // another while still preserving the string length of the JSON in
    // question. So, we just slap the length in there and verify it on read.
    //
    // Thanks to @isaacs for the whiteboarding session that ended up with
    // this.
    await fs.appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
    await fixOwner.chownr(cache, bucket)
  } catch (err) {
    if (err.code === 'ENOENT') {
      return undefined
    }

    throw err
    // There's a class of race conditions that happen when things get deleted
    // during fixOwner, or between the two mkdirfix/chownr calls.
    //
    // It's perfectly fine to just not bother in those cases and lie
    // that the index entry was written. Because it's a cache.
  }
  return formatEntry(cache, entry)
}

module.exports.insert.sync = insertSync

function insertSync (cache, key, integrity, opts = {}) {
  const { metadata, size } = opts
  const bucket = bucketPath(cache, key)
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: Date.now(),
    size,
    metadata,
  }
  fixOwner.mkdirfix.sync(cache, path.dirname(bucket))
  const stringified = JSON.stringify(entry)
  fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
  try {
    fixOwner.chownr.sync(cache, bucket)
  } catch (err) {
    if (err.code !== 'ENOENT') {
      throw err
    }
  }
  return formatEntry(cache, entry)
}

module.exports.find = find

async function find (cache, key) {
  const bucket = bucketPath(cache, key)
  try {
    const entries = await bucketEntries(bucket)
    return entries.reduce((latest, next) => {
      if (next && next.key === key) {
        return formatEntry(cache, next)
      } else {
        return latest
      }
    }, null)
  } catch (err) {
    if (err.code === 'ENOENT') {
      return null
    } else {
      throw err
    }
  }
}

module.exports.find.sync = findSync

function findSync (cache, key) {
  const bucket = bucketPath(cache, key)
  try {
    return bucketEntriesSync(bucket).reduce((latest, next) => {
      if (next && next.key === key) {
        return formatEntry(cache, next)
      } else {
        return latest
      }
    }, null)
  } catch (err) {
    if (err.code === 'ENOENT') {
      return null
    } else {
      throw err
    }
  }
}

module.exports.delete = del

function del (cache, key, opts = {}) {
  if (!opts.removeFully) {
    return insert(cache, key, null, opts)
  }

  const bucket = bucketPath(cache, key)
  return rimraf(bucket)
}

module.exports.delete.sync = delSync

function delSync (cache, key, opts = {}) {
  if (!opts.removeFully) {
    return insertSync(cache, key, null, opts)
  }

  const bucket = bucketPath(cache, key)
  return rimraf.sync(bucket)
}

module.exports.lsStream = lsStream

function lsStream (cache) {
  const indexDir = bucketDir(cache)
  const stream = new Minipass({ objectMode: true })

  // Set all this up to run on the stream and then just return the stream
  Promise.resolve().then(async () => {
    const buckets = await readdirOrEmpty(indexDir)
    await Promise.all(buckets.map(async (bucket) => {
      const bucketPath = path.join(indexDir, bucket)
      const subbuckets = await readdirOrEmpty(bucketPath)
      await Promise.all(subbuckets.map(async (subbucket) => {
        const subbucketPath = path.join(bucketPath, subbucket)

        // "/cachename/<bucket 0xFF>/<bucket 0xFF>./*"
        const subbucketEntries = await readdirOrEmpty(subbucketPath)
        await Promise.all(subbucketEntries.map(async (entry) => {
          const entryPath = path.join(subbucketPath, entry)
          try {
            const entries = await bucketEntries(entryPath)
            // using a Map here prevents duplicate keys from showing up
            // twice, I guess?
            const reduced = entries.reduce((acc, entry) => {
              acc.set(entry.key, entry)
              return acc
            }, new Map())
            // reduced is a map of key => entry
            for (const entry of reduced.values()) {
              const formatted = formatEntry(cache, entry)
              if (formatted) {
                stream.write(formatted)
              }
            }
          } catch (err) {
            if (err.code === 'ENOENT') {
              return undefined
            }
            throw err
          }
        }))
      }))
    }))
    stream.end()
    return stream
  }).catch(err => stream.emit('error', err))

  return stream
}

module.exports.ls = ls

async function ls (cache) {
  const entries = await lsStream(cache).collect()
  return entries.reduce((acc, xs) => {
    acc[xs.key] = xs
    return acc
  }, {})
}

module.exports.bucketEntries = bucketEntries

async function bucketEntries (bucket, filter) {
  const data = await fs.readFile(bucket, 'utf8')
  return _bucketEntries(data, filter)
}

module.exports.bucketEntries.sync = bucketEntriesSync

function bucketEntriesSync (bucket, filter) {
  const data = fs.readFileSync(bucket, 'utf8')
  return _bucketEntries(data, filter)
}

function _bucketEntries (data, filter) {
  const entries = []
  data.split('\n').forEach((entry) => {
    if (!entry) {
      return
    }

    const pieces = entry.split('\t')
    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
      // Hash is no good! Corruption or malice? Doesn't matter!
      // EJECT EJECT
      return
    }
    let obj
    try {
      obj = JSON.parse(pieces[1])
    } catch (e) {
      // Entry is corrupted!
      return
    }
    if (obj) {
      entries.push(obj)
    }
  })
  return entries
}

module.exports.bucketDir = bucketDir

function bucketDir (cache) {
  return path.join(cache, `index-v${indexV}`)
}

module.exports.bucketPath = bucketPath

function bucketPath (cache, key) {
  const hashed = hashKey(key)
  return path.join.apply(
    path,
    [bucketDir(cache)].concat(hashToSegments(hashed))
  )
}

module.exports.hashKey = hashKey

function hashKey (key) {
  return hash(key, 'sha256')
}

module.exports.hashEntry = hashEntry

function hashEntry (str) {
  return hash(str, 'sha1')
}

function hash (str, digest) {
  return crypto
    .createHash(digest)
    .update(str)
    .digest('hex')
}

function formatEntry (cache, entry, keepAll) {
  // Treat null digests as deletions. They'll shadow any previous entries.
  if (!entry.integrity && !keepAll) {
    return null
  }

  return {
    key: entry.key,
    integrity: entry.integrity,
    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
    size: entry.size,
    time: entry.time,
    metadata: entry.metadata,
  }
}

function readdirOrEmpty (dir) {
  return fs.readdir(dir).catch((err) => {
    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
      return []
    }

    throw err
  })
}
225 desktop-operator/node_modules/cacache/lib/get.js (generated, vendored, normal file)
@@ -0,0 +1,225 @@
'use strict'

const Collect = require('minipass-collect')
const Minipass = require('minipass')
const Pipeline = require('minipass-pipeline')

const index = require('./entry-index')
const memo = require('./memoization')
const read = require('./content/read')

async function getData (cache, key, opts = {}) {
  const { integrity, memoize, size } = opts
  const memoized = memo.get(cache, key, opts)
  if (memoized && memoize !== false) {
    return {
      metadata: memoized.entry.metadata,
      data: memoized.data,
      integrity: memoized.entry.integrity,
      size: memoized.entry.size,
    }
  }

  const entry = await index.find(cache, key, opts)
  if (!entry) {
    throw new index.NotFoundError(cache, key)
  }
  const data = await read(cache, entry.integrity, { integrity, size })
  if (memoize) {
    memo.put(cache, entry, data, opts)
  }

  return {
    data,
    metadata: entry.metadata,
    size: entry.size,
    integrity: entry.integrity,
  }
}
module.exports = getData

async function getDataByDigest (cache, key, opts = {}) {
  const { integrity, memoize, size } = opts
  const memoized = memo.get.byDigest(cache, key, opts)
  if (memoized && memoize !== false) {
    return memoized
  }

  const res = await read(cache, key, { integrity, size })
  if (memoize) {
    memo.put.byDigest(cache, key, res, opts)
  }
  return res
}
module.exports.byDigest = getDataByDigest

function getDataSync (cache, key, opts = {}) {
  const { integrity, memoize, size } = opts
  const memoized = memo.get(cache, key, opts)

  if (memoized && memoize !== false) {
    return {
      metadata: memoized.entry.metadata,
      data: memoized.data,
      integrity: memoized.entry.integrity,
      size: memoized.entry.size,
    }
  }
  const entry = index.find.sync(cache, key, opts)
  if (!entry) {
    throw new index.NotFoundError(cache, key)
  }
  const data = read.sync(cache, entry.integrity, {
    integrity: integrity,
    size: size,
  })
  const res = {
    metadata: entry.metadata,
    data: data,
    size: entry.size,
    integrity: entry.integrity,
  }
  if (memoize) {
    memo.put(cache, entry, res.data, opts)
  }

  return res
}

module.exports.sync = getDataSync

function getDataByDigestSync (cache, digest, opts = {}) {
  const { integrity, memoize, size } = opts
  const memoized = memo.get.byDigest(cache, digest, opts)

  if (memoized && memoize !== false) {
    return memoized
  }

  const res = read.sync(cache, digest, {
    integrity: integrity,
    size: size,
  })
  if (memoize) {
    memo.put.byDigest(cache, digest, res, opts)
  }

  return res
}
module.exports.sync.byDigest = getDataByDigestSync

const getMemoizedStream = (memoized) => {
  const stream = new Minipass()
  stream.on('newListener', function (ev, cb) {
    ev === 'metadata' && cb(memoized.entry.metadata)
    ev === 'integrity' && cb(memoized.entry.integrity)
    ev === 'size' && cb(memoized.entry.size)
  })
  stream.end(memoized.data)
  return stream
}

function getStream (cache, key, opts = {}) {
  const { memoize, size } = opts
  const memoized = memo.get(cache, key, opts)
  if (memoized && memoize !== false) {
    return getMemoizedStream(memoized)
  }

  const stream = new Pipeline()
  // Set all this up to run on the stream and then just return the stream
  Promise.resolve().then(async () => {
    const entry = await index.find(cache, key)
    if (!entry) {
      throw new index.NotFoundError(cache, key)
    }

    stream.emit('metadata', entry.metadata)
    stream.emit('integrity', entry.integrity)
    stream.emit('size', entry.size)
    stream.on('newListener', function (ev, cb) {
      ev === 'metadata' && cb(entry.metadata)
      ev === 'integrity' && cb(entry.integrity)
      ev === 'size' && cb(entry.size)
    })

    const src = read.readStream(
      cache,
      entry.integrity,
      { ...opts, size: typeof size !== 'number' ? entry.size : size }
    )

    if (memoize) {
      const memoStream = new Collect.PassThrough()
      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
      stream.unshift(memoStream)
    }
    stream.unshift(src)
    return stream
  }).catch((err) => stream.emit('error', err))

  return stream
}

module.exports.stream = getStream

function getStreamDigest (cache, integrity, opts = {}) {
  const { memoize } = opts
  const memoized = memo.get.byDigest(cache, integrity, opts)
  if (memoized && memoize !== false) {
    const stream = new Minipass()
    stream.end(memoized)
    return stream
  } else {
    const stream = read.readStream(cache, integrity, opts)
    if (!memoize) {
      return stream
    }

    const memoStream = new Collect.PassThrough()
    memoStream.on('collect', data => memo.put.byDigest(
      cache,
      integrity,
      data,
      opts
    ))
    return new Pipeline(stream, memoStream)
  }
}

module.exports.stream.byDigest = getStreamDigest

function info (cache, key, opts = {}) {
  const { memoize } = opts
  const memoized = memo.get(cache, key, opts)
  if (memoized && memoize !== false) {
    return Promise.resolve(memoized.entry)
  } else {
    return index.find(cache, key)
  }
}
module.exports.info = info

async function copy (cache, key, dest, opts = {}) {
  const entry = await index.find(cache, key, opts)
  if (!entry) {
    throw new index.NotFoundError(cache, key)
  }
  await read.copy(cache, entry.integrity, dest, opts)
  return {
    metadata: entry.metadata,
    size: entry.size,
    integrity: entry.integrity,
  }
}

module.exports.copy = copy

async function copyByDigest (cache, key, dest, opts = {}) {
  await read.copy(cache, key, dest, opts)
  return key
}

module.exports.copy.byDigest = copyByDigest

module.exports.hasContent = read.hasContent
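A hedged usage sketch of the module above (not part of the vendored file); the cache path and key are illustrative:

```js
const get = require('./lib/get.js')

async function demo () {
  // Promise API: resolves with the data plus entry fields,
  // and throws index.NotFoundError on a miss (per getData above).
  const { data, metadata, integrity, size } = await get('/tmp/demo-cache', 'some-key')
  console.log(size, integrity, metadata, data.length)

  // Stream API: 'integrity' and 'size' are replayed to late listeners
  // via the 'newListener' hook shown above.
  get.stream('/tmp/demo-cache', 'some-key')
    .on('integrity', (i) => console.log('integrity:', i))
    .on('data', (chunk) => console.log('chunk of', chunk.length, 'bytes'))
}
```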
45 desktop-operator/node_modules/cacache/lib/index.js generated vendored Normal file
@@ -0,0 +1,45 @@
'use strict'

const get = require('./get.js')
const put = require('./put.js')
const rm = require('./rm.js')
const verify = require('./verify.js')
const { clearMemoized } = require('./memoization.js')
const tmp = require('./util/tmp.js')
const index = require('./entry-index.js')

module.exports.index = {}
module.exports.index.compact = index.compact
module.exports.index.insert = index.insert

module.exports.ls = index.ls
module.exports.ls.stream = index.lsStream

module.exports.get = get
module.exports.get.byDigest = get.byDigest
module.exports.get.sync = get.sync
module.exports.get.sync.byDigest = get.sync.byDigest
module.exports.get.stream = get.stream
module.exports.get.stream.byDigest = get.stream.byDigest
module.exports.get.copy = get.copy
module.exports.get.copy.byDigest = get.copy.byDigest
module.exports.get.info = get.info
module.exports.get.hasContent = get.hasContent
module.exports.get.hasContent.sync = get.hasContent.sync

module.exports.put = put
module.exports.put.stream = put.stream

module.exports.rm = rm.entry
module.exports.rm.all = rm.all
module.exports.rm.entry = module.exports.rm
module.exports.rm.content = rm.content

module.exports.clearMemoized = clearMemoized

module.exports.tmp = {}
module.exports.tmp.mkdir = tmp.mkdir
module.exports.tmp.withTmp = tmp.withTmp

module.exports.verify = verify
module.exports.verify.lastRun = verify.lastRun
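The wiring above assembles the public API surface; a small sketch of how the aliases resolve (paths and keys illustrative, not part of the vendored file):

```js
const cacache = require('./lib/index.js')

async function demo () {
  const entries = await cacache.ls('/tmp/demo-cache') // { key: entry, ... }
  const info = await cacache.get.info('/tmp/demo-cache', 'some-key') // entry or null
  // cacache.rm and cacache.rm.entry are the same function, per the aliasing above.
  console.log(cacache.rm === cacache.rm.entry, entries, info)
}
```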
72 desktop-operator/node_modules/cacache/lib/memoization.js generated vendored Normal file
@@ -0,0 +1,72 @@
'use strict'

const LRU = require('lru-cache')

const MEMOIZED = new LRU({
  max: 500,
  maxSize: 50 * 1024 * 1024, // 50MB
  ttl: 3 * 60 * 1000, // 3 minutes
  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
})

module.exports.clearMemoized = clearMemoized

function clearMemoized () {
  const old = {}
  MEMOIZED.forEach((v, k) => {
    old[k] = v
  })
  MEMOIZED.clear()
  return old
}

module.exports.put = put

function put (cache, entry, data, opts) {
  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
  putDigest(cache, entry.integrity, data, opts)
}

module.exports.put.byDigest = putDigest

function putDigest (cache, integrity, data, opts) {
  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
}

module.exports.get = get

function get (cache, key, opts) {
  return pickMem(opts).get(`key:${cache}:${key}`)
}

module.exports.get.byDigest = getDigest

function getDigest (cache, integrity, opts) {
  return pickMem(opts).get(`digest:${cache}:${integrity}`)
}

class ObjProxy {
  constructor (obj) {
    this.obj = obj
  }

  get (key) {
    return this.obj[key]
  }

  set (key, val) {
    this.obj[key] = val
  }
}

function pickMem (opts) {
  if (!opts || !opts.memoize) {
    return MEMOIZED
  } else if (opts.memoize.get && opts.memoize.set) {
    return opts.memoize
  } else if (typeof opts.memoize === 'object') {
    return new ObjProxy(opts.memoize)
  } else {
    return MEMOIZED
  }
}
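A sketch of the `opts.memoize` contract that `pickMem` implements above (not part of the vendored file): anything with `get` and `set` methods is used directly, while a plain object is wrapped in `ObjProxy`. The cache path and integrity string are illustrative.

```js
const memo = require('./lib/memoization.js')

const external = new Map() // has get/set, so pickMem uses it directly
memo.put.byDigest('/tmp/demo-cache', 'sha512-deadbeef', Buffer.from('hi'), { memoize: external })
console.log(external.get('digest:/tmp/demo-cache:sha512-deadbeef')) // => <Buffer 68 69>

const plain = {} // no get/set methods: wrapped in ObjProxy, stored as properties
memo.put.byDigest('/tmp/demo-cache', 'sha512-deadbeef', Buffer.from('hi'), { memoize: plain })
console.log(plain['digest:/tmp/demo-cache:sha512-deadbeef']) // => <Buffer 68 69>
```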
80 desktop-operator/node_modules/cacache/lib/put.js generated vendored Normal file
@@ -0,0 +1,80 @@
'use strict'

const index = require('./entry-index')
const memo = require('./memoization')
const write = require('./content/write')
const Flush = require('minipass-flush')
const { PassThrough } = require('minipass-collect')
const Pipeline = require('minipass-pipeline')

const putOpts = (opts) => ({
  algorithms: ['sha512'],
  ...opts,
})

module.exports = putData

async function putData (cache, key, data, opts = {}) {
  const { memoize } = opts
  opts = putOpts(opts)
  const res = await write(cache, data, opts)
  const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
  if (memoize) {
    memo.put(cache, entry, data, opts)
  }

  return res.integrity
}

module.exports.stream = putStream

function putStream (cache, key, opts = {}) {
  const { memoize } = opts
  opts = putOpts(opts)
  let integrity
  let size
  let error

  let memoData
  const pipeline = new Pipeline()
  // first item in the pipeline is the memoizer, because we need
  // that to end first and get the collected data.
  if (memoize) {
    const memoizer = new PassThrough().on('collect', data => {
      memoData = data
    })
    pipeline.push(memoizer)
  }

  // contentStream is a write-only, not a passthrough
  // no data comes out of it.
  const contentStream = write.stream(cache, opts)
    .on('integrity', (int) => {
      integrity = int
    })
    .on('size', (s) => {
      size = s
    })
    .on('error', (err) => {
      error = err
    })

  pipeline.push(contentStream)

  // last but not least, we write the index and emit hash and size,
  // and memoize if we're doing that
  pipeline.push(new Flush({
    async flush () {
      if (!error) {
        const entry = await index.insert(cache, key, integrity, { ...opts, size })
        if (memoize && memoData) {
          memo.put(cache, entry, memoData, opts)
        }
        pipeline.emit('integrity', integrity)
        pipeline.emit('size', size)
      }
    },
  }))

  return pipeline
}
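A hedged sketch of driving `putStream` above (not part of the vendored file); the input file, cache path, and key are illustrative:

```js
const fs = require('fs')
const put = require('./lib/put.js')

// The returned pipeline is writable; 'integrity' and 'size' are emitted
// from the Flush step above once the index entry has been written.
fs.createReadStream('./some-input-file')
  .pipe(
    put.stream('/tmp/demo-cache', 'some-key')
      .on('integrity', (i) => console.log('stored with integrity', i))
      .on('size', (s) => console.log('content size:', s))
  )
```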
31 desktop-operator/node_modules/cacache/lib/rm.js generated vendored Normal file
@@ -0,0 +1,31 @@
'use strict'

const util = require('util')

const index = require('./entry-index')
const memo = require('./memoization')
const path = require('path')
const rimraf = util.promisify(require('rimraf'))
const rmContent = require('./content/rm')

module.exports = entry
module.exports.entry = entry

function entry (cache, key, opts) {
  memo.clearMemoized()
  return index.delete(cache, key, opts)
}

module.exports.content = content

function content (cache, integrity) {
  memo.clearMemoized()
  return rmContent(cache, integrity)
}

module.exports.all = all

function all (cache) {
  memo.clearMemoized()
  return rimraf(path.join(cache, '*(content-*|index-*)'))
}
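The three removal levels above, in a small hedged sketch (illustrative path, key, and integrity; not part of the vendored file):

```js
const rm = require('./lib/rm.js')

async function demo () {
  await rm('/tmp/demo-cache', 'some-key')               // index.delete for one key
  await rm.content('/tmp/demo-cache', 'sha512-deadbeef') // remove one content blob
  await rm.all('/tmp/demo-cache')                        // rimraf content-* and index-* dirs
}
```

Note that each call clears the in-memory memoization cache first, as shown above, so stale memoized reads cannot outlive a removal.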
145 desktop-operator/node_modules/cacache/lib/util/fix-owner.js generated vendored Normal file
@@ -0,0 +1,145 @@
'use strict'

const util = require('util')

const chownr = util.promisify(require('chownr'))
const mkdirp = require('mkdirp')
const inflight = require('promise-inflight')
const inferOwner = require('infer-owner')

// Memoize getuid()/getgid() calls.
// patch process.setuid/setgid to invalidate cached value on change
const self = { uid: null, gid: null }
const getSelf = () => {
  if (typeof self.uid !== 'number') {
    self.uid = process.getuid()
    const setuid = process.setuid
    process.setuid = (uid) => {
      self.uid = null
      process.setuid = setuid
      return process.setuid(uid)
    }
  }
  if (typeof self.gid !== 'number') {
    self.gid = process.getgid()
    const setgid = process.setgid
    process.setgid = (gid) => {
      self.gid = null
      process.setgid = setgid
      return process.setgid(gid)
    }
  }
}

module.exports.chownr = fixOwner

async function fixOwner (cache, filepath) {
  if (!process.getuid) {
    // This platform doesn't need ownership fixing
    return
  }

  getSelf()
  if (self.uid !== 0) {
    // almost certainly can't chown anyway
    return
  }

  const { uid, gid } = await inferOwner(cache)

  // No need to override if it's already what we used.
  if (self.uid === uid && self.gid === gid) {
    return
  }

  return inflight('fixOwner: fixing ownership on ' + filepath, () =>
    chownr(
      filepath,
      typeof uid === 'number' ? uid : self.uid,
      typeof gid === 'number' ? gid : self.gid
    ).catch((err) => {
      if (err.code === 'ENOENT') {
        return null
      }

      throw err
    })
  )
}

module.exports.chownr.sync = fixOwnerSync

function fixOwnerSync (cache, filepath) {
  if (!process.getuid) {
    // This platform doesn't need ownership fixing
    return
  }
  const { uid, gid } = inferOwner.sync(cache)
  getSelf()
  if (self.uid !== 0) {
    // almost certainly can't chown anyway
    return
  }

  if (self.uid === uid && self.gid === gid) {
    // No need to override if it's already what we used.
    return
  }
  try {
    chownr.sync(
      filepath,
      typeof uid === 'number' ? uid : self.uid,
      typeof gid === 'number' ? gid : self.gid
    )
  } catch (err) {
    // only catch ENOENT, any other error is a problem.
    if (err.code === 'ENOENT') {
      return null
    }

    throw err
  }
}

module.exports.mkdirfix = mkdirfix

async function mkdirfix (cache, p, cb) {
  // we have to infer the owner _before_ making the directory, even though
  // we aren't going to use the results, since the cache itself might not
  // exist yet. If we mkdirp it, then our current uid/gid will be assumed
  // to be correct if it creates the cache folder in the process.
  await inferOwner(cache)
  try {
    const made = await mkdirp(p)
    if (made) {
      await fixOwner(cache, made)
      return made
    }
  } catch (err) {
    if (err.code === 'EEXIST') {
      await fixOwner(cache, p)
      return null
    }
    throw err
  }
}

module.exports.mkdirfix.sync = mkdirfixSync

function mkdirfixSync (cache, p) {
  try {
    inferOwner.sync(cache)
    const made = mkdirp.sync(p)
    if (made) {
      fixOwnerSync(cache, made)
      return made
    }
  } catch (err) {
    if (err.code === 'EEXIST') {
      fixOwnerSync(cache, p)
      return null
    } else {
      throw err
    }
  }
}
7 desktop-operator/node_modules/cacache/lib/util/hash-to-segments.js generated vendored Normal file
@@ -0,0 +1,7 @@
'use strict'

module.exports = hashToSegments

function hashToSegments (hash) {
  return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
}
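Example of the segmentation above (not part of the vendored file): the first two byte-pairs of a hex digest become nested directory names, which keeps any single cache directory from accumulating too many entries.

```js
const hashToSegments = require('./lib/util/hash-to-segments.js')

console.log(hashToSegments('0123456789abcdef'))
// => [ '01', '23', '456789abcdef' ]
```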
56 desktop-operator/node_modules/cacache/lib/util/move-file.js generated vendored Normal file
@@ -0,0 +1,56 @@
'use strict'

const fs = require('@npmcli/fs')
const move = require('@npmcli/move-file')
const pinflight = require('promise-inflight')

module.exports = moveFile

async function moveFile (src, dest) {
  const isWindows = process.platform === 'win32'

  // This isn't quite an fs.rename -- the assumption is that
  // if `dest` already exists, and we get certain errors while
  // trying to move it, we should just not bother.
  //
  // In the case of cache corruption, users will receive an
  // EINTEGRITY error elsewhere, and can remove the offending
  // content their own way.
  //
  // Note that, as the name suggests, this strictly only supports file moves.
  try {
    await fs.link(src, dest)
  } catch (err) {
    if (isWindows && err.code === 'EPERM') {
      // XXX This is a really weird way to handle this situation, as it
      // results in the src file being deleted even though the dest
      // might not exist. Since we pretty much always write files to
      // deterministic locations based on content hash, this is likely
      // ok (or at worst, just ends in a future cache miss). But it would
      // be worth investigating at some time in the future if this is
      // really what we want to do here.
    } else if (err.code === 'EEXIST' || err.code === 'EBUSY') {
      // file already exists, so whatever
    } else {
      throw err
    }
  }
  try {
    await Promise.all([
      fs.unlink(src),
      !isWindows && fs.chmod(dest, '0444'),
    ])
  } catch (e) {
    return pinflight('cacache-move-file:' + dest, async () => {
      await fs.stat(dest).catch((err) => {
        if (err.code !== 'ENOENT') {
          // Something else is wrong here. Bail bail bail
          throw err
        }
      })
      // file doesn't already exist! let's try a rename -> copy fallback
      // only delete if it successfully copies
      return move(src, dest)
    })
  }
}
33 desktop-operator/node_modules/cacache/lib/util/tmp.js generated vendored Normal file
@@ -0,0 +1,33 @@
'use strict'

const fs = require('@npmcli/fs')

const fixOwner = require('./fix-owner')
const path = require('path')

module.exports.mkdir = mktmpdir

async function mktmpdir (cache, opts = {}) {
  const { tmpPrefix } = opts
  const tmpDir = path.join(cache, 'tmp')
  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
  // do not use path.join(), it drops the trailing / if tmpPrefix is unset
  const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
  return fs.mkdtemp(target, { owner: 'inherit' })
}

module.exports.withTmp = withTmp

function withTmp (cache, opts, cb) {
  if (!cb) {
    cb = opts
    opts = {}
  }
  return fs.withTempDir(path.join(cache, 'tmp'), cb, opts)
}

module.exports.fix = fixtmpdir

function fixtmpdir (cache) {
  return fixOwner(cache, path.join(cache, 'tmp'))
}
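A hedged sketch of `withTmp` above (not part of the vendored file): the temp directory is created under `<cache>/tmp` and cleaned up by `fs.withTempDir` once the callback's promise settles. The cache path is illustrative.

```js
const tmp = require('./lib/util/tmp.js')

async function demo () {
  await tmp.withTmp('/tmp/demo-cache', async (dir) => {
    // stage files in `dir` here; it is removed once this callback resolves
    console.log('working in', dir)
  })
}
```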
257 desktop-operator/node_modules/cacache/lib/verify.js generated vendored Normal file
@@ -0,0 +1,257 @@
'use strict'

const util = require('util')

const pMap = require('p-map')
const contentPath = require('./content/path')
const fixOwner = require('./util/fix-owner')
const fs = require('@npmcli/fs')
const fsm = require('fs-minipass')
const glob = util.promisify(require('glob'))
const index = require('./entry-index')
const path = require('path')
const rimraf = util.promisify(require('rimraf'))
const ssri = require('ssri')

const globify = pattern => pattern.split('\\').join('/')

const hasOwnProperty = (obj, key) =>
  Object.prototype.hasOwnProperty.call(obj, key)

const verifyOpts = (opts) => ({
  concurrency: 20,
  log: { silly () {} },
  ...opts,
})

module.exports = verify

async function verify (cache, opts) {
  opts = verifyOpts(opts)
  opts.log.silly('verify', 'verifying cache at', cache)

  const steps = [
    markStartTime,
    fixPerms,
    garbageCollect,
    rebuildIndex,
    cleanTmp,
    writeVerifile,
    markEndTime,
  ]

  const stats = {}
  for (const step of steps) {
    const label = step.name
    const start = new Date()
    const s = await step(cache, opts)
    if (s) {
      Object.keys(s).forEach((k) => {
        stats[k] = s[k]
      })
    }
    const end = new Date()
    if (!stats.runTime) {
      stats.runTime = {}
    }
    stats.runTime[label] = end - start
  }
  stats.runTime.total = stats.endTime - stats.startTime
  opts.log.silly(
    'verify',
    'verification finished for',
    cache,
    'in',
    `${stats.runTime.total}ms`
  )
  return stats
}

async function markStartTime (cache, opts) {
  return { startTime: new Date() }
}

async function markEndTime (cache, opts) {
  return { endTime: new Date() }
}

async function fixPerms (cache, opts) {
  opts.log.silly('verify', 'fixing cache permissions')
  await fixOwner.mkdirfix(cache, cache)
  // TODO - fix file permissions too
  await fixOwner.chownr(cache, cache)
  return null
}

// Implements a naive mark-and-sweep tracing garbage collector.
//
// The algorithm is basically as follows:
// 1. Read (and filter) all index entries ("pointers")
// 2. Mark each integrity value as "live"
// 3. Read entire filesystem tree in `content-vX/` dir
// 4. If content is live, verify its checksum and delete it if it fails
// 5. If content is not marked as live, rimraf it.
//
async function garbageCollect (cache, opts) {
  opts.log.silly('verify', 'garbage collecting content')
  const indexStream = index.lsStream(cache)
  const liveContent = new Set()
  indexStream.on('data', (entry) => {
    if (opts.filter && !opts.filter(entry)) {
      return
    }

    liveContent.add(entry.integrity.toString())
  })
  await new Promise((resolve, reject) => {
    indexStream.on('end', resolve).on('error', reject)
  })
  const contentDir = contentPath.contentDir(cache)
  const files = await glob(globify(path.join(contentDir, '**')), {
    follow: false,
    nodir: true,
    nosort: true,
  })
  const stats = {
    verifiedContent: 0,
    reclaimedCount: 0,
    reclaimedSize: 0,
    badContentCount: 0,
    keptSize: 0,
  }
  await pMap(
    files,
    async (f) => {
      const split = f.split(/[/\\]/)
      const digest = split.slice(split.length - 3).join('')
      const algo = split[split.length - 4]
      const integrity = ssri.fromHex(digest, algo)
      if (liveContent.has(integrity.toString())) {
        const info = await verifyContent(f, integrity)
        if (!info.valid) {
          stats.reclaimedCount++
          stats.badContentCount++
          stats.reclaimedSize += info.size
        } else {
          stats.verifiedContent++
          stats.keptSize += info.size
        }
      } else {
        // No entries refer to this content. We can delete.
        stats.reclaimedCount++
        const s = await fs.stat(f)
        await rimraf(f)
        stats.reclaimedSize += s.size
      }
      return stats
    },
    { concurrency: opts.concurrency }
  )
  return stats
}

async function verifyContent (filepath, sri) {
  const contentInfo = {}
  try {
    const { size } = await fs.stat(filepath)
    contentInfo.size = size
    contentInfo.valid = true
    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
  } catch (err) {
    if (err.code === 'ENOENT') {
      return { size: 0, valid: false }
    }
    if (err.code !== 'EINTEGRITY') {
      throw err
    }

    await rimraf(filepath)
    contentInfo.valid = false
  }
  return contentInfo
}

async function rebuildIndex (cache, opts) {
  opts.log.silly('verify', 'rebuilding index')
  const entries = await index.ls(cache)
  const stats = {
    missingContent: 0,
    rejectedEntries: 0,
    totalEntries: 0,
  }
  const buckets = {}
  for (const k in entries) {
    /* istanbul ignore else */
    if (hasOwnProperty(entries, k)) {
      const hashed = index.hashKey(k)
      const entry = entries[k]
      const excluded = opts.filter && !opts.filter(entry)
      excluded && stats.rejectedEntries++
      if (buckets[hashed] && !excluded) {
        buckets[hashed].push(entry)
      } else if (buckets[hashed] && excluded) {
        // skip
      } else if (excluded) {
        buckets[hashed] = []
        buckets[hashed]._path = index.bucketPath(cache, k)
      } else {
        buckets[hashed] = [entry]
        buckets[hashed]._path = index.bucketPath(cache, k)
      }
    }
  }
  await pMap(
    Object.keys(buckets),
    (key) => {
      return rebuildBucket(cache, buckets[key], stats, opts)
    },
    { concurrency: opts.concurrency }
  )
  return stats
}

async function rebuildBucket (cache, bucket, stats, opts) {
  await fs.truncate(bucket._path)
  // This needs to be serialized because cacache explicitly
  // lets very racy bucket conflicts clobber each other.
  for (const entry of bucket) {
    const content = contentPath(cache, entry.integrity)
    try {
      await fs.stat(content)
      await index.insert(cache, entry.key, entry.integrity, {
        metadata: entry.metadata,
        size: entry.size,
      })
      stats.totalEntries++
    } catch (err) {
      if (err.code === 'ENOENT') {
        stats.rejectedEntries++
        stats.missingContent++
      } else {
        throw err
      }
    }
  }
}

function cleanTmp (cache, opts) {
  opts.log.silly('verify', 'cleaning tmp directory')
  return rimraf(path.join(cache, 'tmp'))
}

function writeVerifile (cache, opts) {
  const verifile = path.join(cache, '_lastverified')
  opts.log.silly('verify', 'writing verifile to ' + verifile)
  try {
    return fs.writeFile(verifile, `${Date.now()}`)
  } finally {
    fixOwner.chownr.sync(cache, verifile)
  }
}

module.exports.lastRun = lastRun

async function lastRun (cache) {
  const data = await fs.readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
  return new Date(+data)
}
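A hedged sketch of running the verification pipeline above (not part of the vendored file); the cache path and filter predicate are illustrative:

```js
const verify = require('./lib/verify.js')

async function demo () {
  const stats = await verify('/tmp/demo-cache', {
    concurrency: 10, // overrides the default of 20 set in verifyOpts above
    filter: (entry) => entry.key.startsWith('keep:'), // illustrative filter
  })
  console.log('verified:', stats.verifiedContent, 'reclaimed bytes:', stats.reclaimedSize)
  console.log('total run time (ms):', stats.runTime.total)
  console.log('last verified at', await verify.lastRun('/tmp/demo-cache'))
}
```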
2 desktop-operator/node_modules/cacache/node_modules/brace-expansion/.github/FUNDING.yml generated vendored Normal file
@@ -0,0 +1,2 @@
tidelift: "npm/brace-expansion"
patreon: juliangruber
21 desktop-operator/node_modules/cacache/node_modules/brace-expansion/LICENSE generated vendored Normal file
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
135 desktop-operator/node_modules/cacache/node_modules/brace-expansion/README.md generated vendored Normal file
@@ -0,0 +1,135 @@
# brace-expansion

[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html),
as known from sh/bash, in JavaScript.

[](http://travis-ci.org/juliangruber/brace-expansion)
[](https://www.npmjs.org/package/brace-expansion)
[](https://greenkeeper.io/)

[](https://ci.testling.com/juliangruber/brace-expansion)

## Example

```js
var expand = require('brace-expansion');

expand('file-{a,b,c}.jpg')
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']

expand('-v{,,}')
// => ['-v', '-v', '-v']

expand('file{0..2}.jpg')
// => ['file0.jpg', 'file1.jpg', 'file2.jpg']

expand('file-{a..c}.jpg')
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']

expand('file{2..0}.jpg')
// => ['file2.jpg', 'file1.jpg', 'file0.jpg']

expand('file{0..4..2}.jpg')
// => ['file0.jpg', 'file2.jpg', 'file4.jpg']

expand('file-{a..e..2}.jpg')
// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg']

expand('file{00..10..5}.jpg')
// => ['file00.jpg', 'file05.jpg', 'file10.jpg']

expand('{{A..C},{a..c}}')
// => ['A', 'B', 'C', 'a', 'b', 'c']

expand('ppp{,config,oe{,conf}}')
// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf']
```

## API

```js
var expand = require('brace-expansion');
```

### var expanded = expand(str)

Return an array of all possible and valid expansions of `str`. If none are
found, `[str]` is returned.

Valid expansions are:

```js
/^(.*,)+(.+)?$/
// {a,b,...}
```

A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`.

```js
/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
// {x..y[..incr]}
```

A numeric sequence from `x` to `y` inclusive, with optional increment.
If `x` or `y` start with a leading `0`, all the numbers will be padded
to have equal length. Negative numbers and backwards iteration work too.

```js
/^[a-zA-Z]\.\.[a-zA-Z](\.\.-?\d+)?$/
// {x..y[..incr]}
```

An alphabetic sequence from `x` to `y` inclusive, with optional increment.
`x` and `y` must be exactly one character, and if given, `incr` must be a
number.

For compatibility reasons, the string `${` is not eligible for brace expansion.

## Installation

With [npm](https://npmjs.org) do:

```bash
npm install brace-expansion
```

## Contributors

- [Julian Gruber](https://github.com/juliangruber)
- [Isaac Z. Schlueter](https://github.com/isaacs)

## Sponsors

This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)!

Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)!

## Security contact information

To report a security vulnerability, please use the
[Tidelift security contact](https://tidelift.com/security).
Tidelift will coordinate the fix and disclosure.

## License

(MIT)

Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
203 desktop-operator/node_modules/cacache/node_modules/brace-expansion/index.js generated vendored Normal file
@@ -0,0 +1,203 @@
var balanced = require('balanced-match');

module.exports = expandTop;

var escSlash = '\0SLASH'+Math.random()+'\0';
var escOpen = '\0OPEN'+Math.random()+'\0';
var escClose = '\0CLOSE'+Math.random()+'\0';
var escComma = '\0COMMA'+Math.random()+'\0';
var escPeriod = '\0PERIOD'+Math.random()+'\0';

function numeric(str) {
  return parseInt(str, 10) == str
    ? parseInt(str, 10)
    : str.charCodeAt(0);
}

function escapeBraces(str) {
  return str.split('\\\\').join(escSlash)
            .split('\\{').join(escOpen)
            .split('\\}').join(escClose)
            .split('\\,').join(escComma)
            .split('\\.').join(escPeriod);
}

function unescapeBraces(str) {
  return str.split(escSlash).join('\\')
            .split(escOpen).join('{')
            .split(escClose).join('}')
            .split(escComma).join(',')
            .split(escPeriod).join('.');
}


// Basically just str.split(","), but handling cases
// where we have nested braced sections, which should be
// treated as individual members, like {a,{b,c},d}
function parseCommaParts(str) {
  if (!str)
    return [''];

  var parts = [];
  var m = balanced('{', '}', str);

  if (!m)
    return str.split(',');

  var pre = m.pre;
  var body = m.body;
  var post = m.post;
  var p = pre.split(',');

  p[p.length-1] += '{' + body + '}';
  var postParts = parseCommaParts(post);
  if (post.length) {
    p[p.length-1] += postParts.shift();
    p.push.apply(p, postParts);
  }

  parts.push.apply(parts, p);

  return parts;
}

function expandTop(str) {
  if (!str)
    return [];

  // I don't know why Bash 4.3 does this, but it does.
  // Anything starting with {} will have the first two bytes preserved
  // but *only* at the top level, so {},a}b will not expand to anything,
  // but a{},b}c will be expanded to [a}c,abc].
  // One could argue that this is a bug in Bash, but since the goal of
  // this module is to match Bash's rules, we escape a leading {}
  if (str.substr(0, 2) === '{}') {
    str = '\\{\\}' + str.substr(2);
  }

  return expand(escapeBraces(str), true).map(unescapeBraces);
}

function embrace(str) {
  return '{' + str + '}';
}
function isPadded(el) {
  return /^-?0\d/.test(el);
}

function lte(i, y) {
  return i <= y;
}
function gte(i, y) {
  return i >= y;
}

function expand(str, isTop) {
  var expansions = [];

  var m = balanced('{', '}', str);
  if (!m) return [str];

  // no need to expand pre, since it is guaranteed to be free of brace-sets
  var pre = m.pre;
  var post = m.post.length
    ? expand(m.post, false)
    : [''];

  if (/\$$/.test(m.pre)) {
    for (var k = 0; k < post.length; k++) {
      var expansion = pre+ '{' + m.body + '}' + post[k];
      expansions.push(expansion);
    }
  } else {
    var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
    var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
    var isSequence = isNumericSequence || isAlphaSequence;
    var isOptions = m.body.indexOf(',') >= 0;
    if (!isSequence && !isOptions) {
      // {a},b}
      if (m.post.match(/,(?!,).*\}/)) {
        str = m.pre + '{' + m.body + escClose + m.post;
        return expand(str);
      }
      return [str];
    }

    var n;
    if (isSequence) {
      n = m.body.split(/\.\./);
    } else {
      n = parseCommaParts(m.body);
      if (n.length === 1) {
        // x{{a,b}}y ==> x{a}y x{b}y
        n = expand(n[0], false).map(embrace);
        if (n.length === 1) {
          return post.map(function(p) {
            return m.pre + n[0] + p;
          });
        }
      }
    }

    // at this point, n is the parts, and we know it's not a comma set
    // with a single entry.
    var N;

    if (isSequence) {
      var x = numeric(n[0]);
      var y = numeric(n[1]);
      var width = Math.max(n[0].length, n[1].length)
      var incr = n.length == 3
        ? Math.abs(numeric(n[2]))
        : 1;
      var test = lte;
      var reverse = y < x;
      if (reverse) {
        incr *= -1;
        test = gte;
      }
      var pad = n.some(isPadded);

      N = [];

      for (var i = x; test(i, y); i += incr) {
        var c;
        if (isAlphaSequence) {
          c = String.fromCharCode(i);
          if (c === '\\')
            c = '';
        } else {
          c = String(i);
          if (pad) {
            var need = width - c.length;
            if (need > 0) {
              var z = new Array(need + 1).join('0');
              if (i < 0)
                c = '-' + z + c.slice(1);
              else
                c = z + c;
            }
          }
        }
        N.push(c);
      }
    } else {
      N = [];

      for (var j = 0; j < n.length; j++) {
        N.push.apply(N, expand(n[j], false));
      }
    }

    for (var j = 0; j < N.length; j++) {
      for (var k = 0; k < post.length; k++) {
        var expansion = pre + N[j] + post[k];
        if (!isTop || isSequence || expansion)
          expansions.push(expansion);
      }
    }
  }

  return expansions;
}
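Quick checks of the behavior documented in the comments above (a sketch, not part of the vendored file):

```js
const expand = require('./index.js')

// From the comment in expandTop above: a leading {} is escaped, and only
// {} at the top level gets this treatment.
console.log(expand('a{},b}c'))        // => [ 'a}c', 'abc' ]
console.log(expand('file{0..2}.jpg')) // => [ 'file0.jpg', 'file1.jpg', 'file2.jpg' ]
```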
49 desktop-operator/node_modules/cacache/node_modules/brace-expansion/package.json generated vendored Normal file
@@ -0,0 +1,49 @@
{
  "name": "brace-expansion",
  "description": "Brace expansion as known from sh/bash",
  "version": "2.0.2",
  "repository": {
    "type": "git",
    "url": "git://github.com/juliangruber/brace-expansion.git"
  },
  "homepage": "https://github.com/juliangruber/brace-expansion",
  "main": "index.js",
  "scripts": {
    "test": "tape test/*.js",
    "gentest": "bash test/generate.sh",
    "bench": "matcha test/perf/bench.js"
  },
  "dependencies": {
    "balanced-match": "^1.0.0"
  },
  "devDependencies": {
    "@c4312/matcha": "^1.3.1",
    "tape": "^4.6.0"
  },
  "keywords": [],
  "author": {
    "name": "Julian Gruber",
    "email": "mail@juliangruber.com",
    "url": "http://juliangruber.com"
  },
  "license": "MIT",
  "testling": {
    "files": "test/*.js",
    "browsers": [
      "ie/8..latest",
      "firefox/20..latest",
      "firefox/nightly",
      "chrome/25..latest",
      "chrome/canary",
      "opera/12..latest",
      "opera/next",
      "safari/5.1..latest",
      "ipad/6.0..latest",
      "iphone/6.0..latest",
      "android-browser/4.2..latest"
    ]
  },
  "publishConfig": {
    "tag": "2.x"
  }
}
15 desktop-operator/node_modules/cacache/node_modules/glob/LICENSE generated vendored Normal file
@@ -0,0 +1,15 @@
The ISC License

Copyright (c) 2009-2022 Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
399 desktop-operator/node_modules/cacache/node_modules/glob/README.md generated vendored Normal file
@@ -0,0 +1,399 @@
|
||||
# Glob
|
||||
|
||||
Match files using the patterns the shell uses, like stars and stuff.
|
||||
|
||||
[](https://travis-ci.org/isaacs/node-glob/) [](https://ci.appveyor.com/project/isaacs/node-glob) [](https://coveralls.io/github/isaacs/node-glob?branch=master)
|
||||
|
||||
This is a glob implementation in JavaScript. It uses the `minimatch`
|
||||
library to do its matching.
|
||||
|
||||

|
||||
|
||||
## Usage
|
||||
|
||||
Install with npm
|
||||
|
||||
```
|
||||
npm i glob
|
||||
```
|
||||
|
||||
```javascript
|
||||
var glob = require("glob")
|
||||
|
||||
// options is optional
|
||||
glob("**/*.js", options, function (er, files) {
|
||||
// files is an array of filenames.
|
||||
// If the `nonull` option is set, and nothing
|
||||
// was found, then files is ["**/*.js"]
|
||||
// er is an error object or null.
|
||||
})
|
||||
```
|
||||
|
||||
## Glob Primer
|
||||
|
||||
"Globs" are the patterns you type when you do stuff like `ls *.js` on
|
||||
the command line, or put `build/*` in a `.gitignore` file.
|
||||
|
||||
Before parsing the path part patterns, braced sections are expanded
|
||||
into a set. Braced sections start with `{` and end with `}`, with any
|
||||
number of comma-delimited sections within. Braced sections may contain
|
||||
slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`.
|
||||
|
||||
The following characters have special magic meaning when used in a
|
||||
path portion:
|
||||
|
||||
* `*` Matches 0 or more characters in a single path portion
|
||||
* `?` Matches 1 character
|
||||
* `[...]` Matches a range of characters, similar to a RegExp range.
|
||||
If the first character of the range is `!` or `^` then it matches
|
||||
any character not in the range.
|
||||
* `!(pattern|pattern|pattern)` Matches anything that does not match
|
||||
any of the patterns provided.
|
||||
* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the
|
||||
patterns provided.
|
||||
* `+(pattern|pattern|pattern)` Matches one or more occurrences of the
|
||||
patterns provided.
|
||||
* `*(a|b|c)` Matches zero or more occurrences of the patterns provided
|
||||
* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns
|
||||
provided
|
||||
* `**` If a "globstar" is alone in a path portion, then it matches
|
||||
zero or more directories and subdirectories searching for matches.
|
||||
It does not crawl symlinked directories.
|
||||
|
||||
### Dots
|
||||
|
||||
If a file or directory path portion has a `.` as the first character,
|
||||
then it will not match any glob pattern unless that pattern's
|
||||
corresponding path part also has a `.` as its first character.
|
||||
|
||||
For example, the pattern `a/.*/c` would match the file at `a/.b/c`.
|
||||
However the pattern `a/*/c` would not, because `*` does not start with
|
||||
a dot character.
|
||||
|
||||
You can make glob treat dots as normal characters by setting
|
||||
`dot:true` in the options.
|
||||
|
||||
### Basename Matching
|
||||
|
||||
If you set `matchBase:true` in the options, and the pattern has no
|
||||
slashes in it, then it will seek for any file anywhere in the tree
|
||||
with a matching basename. For example, `*.js` would match
|
||||
`test/simple/basic.js`.
|
||||
|
||||
### Empty Sets
|
||||
|
||||
If no matching files are found, then an empty array is returned. This
|
||||
differs from the shell, where the pattern itself is returned. For
|
||||
example:
|
||||
|
||||
$ echo a*s*d*f
|
||||
a*s*d*f
|
||||
|
||||
To get the bash-style behavior, set the `nonull:true` in the options.
|
||||
|
||||
### See Also:
|
||||
|
||||
* `man sh`
|
||||
* `man bash` (Search for "Pattern Matching")
|
||||
* `man 3 fnmatch`
|
||||
* `man 5 gitignore`
|
||||
* [minimatch documentation](https://github.com/isaacs/minimatch)
|
||||
|
||||
## glob.hasMagic(pattern, [options])
|
||||
|
||||
Returns `true` if there are any special characters in the pattern, and
|
||||
`false` otherwise.
|
||||
|
||||
Note that the options affect the results. If `noext:true` is set in
|
||||
the options object, then `+(a|b)` will not be considered a magic
|
||||
pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}`
|
||||
then that is considered magical, unless `nobrace:true` is set in the
|
||||
options.
|
||||
|
||||
## glob(pattern, [options], cb)
|
||||
|
||||
* `pattern` `{String}` Pattern to be matched
|
||||
* `options` `{Object}`
|
||||
* `cb` `{Function}`
|
||||
* `err` `{Error | null}`
|
||||
* `matches` `{Array<String>}` filenames found matching the pattern
|
||||
|
||||
Perform an asynchronous glob search.
|
||||
|
||||
## glob.sync(pattern, [options])
|
||||
|
||||
* `pattern` `{String}` Pattern to be matched
|
||||
* `options` `{Object}`
|
||||
* return: `{Array<String>}` filenames found matching the pattern
|
||||
|
||||
Perform a synchronous glob search.
|
||||
|
||||
## Class: glob.Glob
|
||||
|
||||
Create a Glob object by instantiating the `glob.Glob` class.
|
||||
|
||||
```javascript
|
||||
var Glob = require("glob").Glob
|
||||
var mg = new Glob(pattern, options, cb)
|
||||
```
|
||||
|
||||
It's an EventEmitter, and starts walking the filesystem to find matches
|
||||
immediately.
|
||||
|
||||
### new glob.Glob(pattern, [options], [cb])
|
||||
|
||||
* `pattern` `{String}` pattern to search for
|
||||
* `options` `{Object}`
|
||||
* `cb` `{Function}` Called when an error occurs, or matches are found
|
||||
* `err` `{Error | null}`
|
||||
* `matches` `{Array<String>}` filenames found matching the pattern
|
||||
|
||||
Note that if the `sync` flag is set in the options, then matches will
|
||||
be immediately available on the `g.found` member.
|
||||
|
||||
### Properties
|
||||
|
||||
* `minimatch` The minimatch object that the glob uses.
|
||||
* `options` The options object passed in.
|
||||
* `aborted` Boolean which is set to true when calling `abort()`. There
|
||||
is no way at this time to continue a glob search after aborting, but
|
||||
you can re-use the statCache to avoid having to duplicate syscalls.
|
||||
* `cache` Convenience object. Each field has the following possible
|
||||
values:
|
||||
* `false` - Path does not exist
|
||||
* `true` - Path exists
|
||||
* `'FILE'` - Path exists, and is not a directory
|
||||
* `'DIR'` - Path exists, and is a directory
|
||||
* `[file, entries, ...]` - Path exists, is a directory, and the
|
||||
array value is the results of `fs.readdir`
|
||||
* `statCache` Cache of `fs.stat` results, to prevent statting the same
|
||||
path multiple times.
|
||||
* `symlinks` A record of which paths are symbolic links, which is
|
||||
relevant in resolving `**` patterns.
|
||||
* `realpathCache` An optional object which is passed to `fs.realpath`
|
||||
to minimize unnecessary syscalls. It is stored on the instantiated
|
||||
Glob object, and may be re-used.
|
||||
|
||||
### Events
|
||||
|
||||
* `end` When the matching is finished, this is emitted with all the
|
||||
matches found. If the `nonull` option is set, and no match was found,
|
||||
then the `matches` list contains the original pattern. The matches
|
||||
are sorted, unless the `nosort` flag is set.
|
||||
* `match` Every time a match is found, this is emitted with the specific
|
||||
thing that matched. It is not deduplicated or resolved to a realpath.
|
||||
* `error` Emitted when an unexpected error is encountered, or whenever
|
||||
any fs error occurs if `options.strict` is set.
|
||||
* `abort` When `abort()` is called, this event is raised.
|
||||
|
||||
### Methods
|
||||
|
||||
* `pause` Temporarily stop the search
|
||||
* `resume` Resume the search
|
||||
* `abort` Stop the search forever
|
||||
|
||||
### Options
|
||||
|
||||
All the options that can be passed to Minimatch can also be passed to
|
||||
Glob to change pattern matching behavior. Also, some have been added,
|
||||
or have glob-specific ramifications.
|
||||
|
||||
All options are false by default, unless otherwise noted.
|
||||
|
||||
All options are added to the Glob object, as well.
|
||||
|
||||
If you are running many `glob` operations, you can pass a Glob object
|
||||
as the `options` argument to a subsequent operation to shortcut some
|
||||
`stat` and `readdir` calls. At the very least, you may pass in shared
|
||||
`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that
|
||||
parallel glob operations will be sped up by sharing information about
|
||||
the filesystem.
|
||||
|
||||
* `cwd` The current working directory in which to search. Defaults
  to `process.cwd()`. This option is always coerced to use
  forward-slashes as a path separator, because it is not tested
  as a glob pattern, so there is no need to escape anything.
* `root` The place where patterns starting with `/` will be mounted
  onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix
  systems, and `C:\` or some such on Windows.) This option is
  always coerced to use forward-slashes as a path separator,
  because it is not tested as a glob pattern, so there is no need
  to escape anything.
* `windowsPathsNoEscape` Use `\\` as a path separator _only_, and
  _never_ as an escape character. If set, all `\\` characters
  are replaced with `/` in the pattern. Note that this makes it
  **impossible** to match against paths containing literal glob
  pattern characters, but allows matching with patterns constructed
  using `path.join()` and `path.resolve()` on Windows platforms,
  mimicking the (buggy!) behavior of Glob v7 and before on
  Windows. Please use with caution, and be mindful of [the caveat
  below about Windows paths](#windows). (For legacy reasons,
  this is also set if `allowWindowsEscape` is set to the exact
  value `false`.)
* `dot` Include `.dot` files in normal matches and `globstar` matches.
  Note that an explicit dot in a portion of the pattern will always
  match dot files.
* `nomount` By default, a pattern starting with a forward-slash will be
  "mounted" onto the root setting, so that a valid filesystem path is
  returned. Set this flag to disable that behavior.
* `mark` Add a `/` character to directory matches. Note that this
  requires additional stat calls.
* `nosort` Don't sort the results.
* `stat` Set to true to stat *all* results. This reduces performance
  somewhat, and is completely unnecessary, unless `readdir` is presumed
  to be an untrustworthy indicator of file existence.
* `silent` When an unusual error is encountered when attempting to
  read a directory, a warning will be printed to stderr. Set the
  `silent` option to true to suppress these warnings.
* `strict` When an unusual error is encountered when attempting to
  read a directory, the process will just continue on in search of
  other matches. Set the `strict` option to raise an error in these
  cases.
* `cache` See `cache` property above. Pass in a previously generated
  cache object to save some fs calls.
* `statCache` A cache of results of filesystem information, to prevent
  unnecessary stat calls. While it should not normally be necessary
  to set this, you may pass the statCache from one glob() call to the
  options object of another, if you know that the filesystem will not
  change between calls. (See "Race Conditions" below.)
* `symlinks` A cache of known symbolic links. You may pass in a
  previously generated `symlinks` object to save `lstat` calls when
  resolving `**` matches.
* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead.
* `nounique` In some cases, brace-expanded patterns can result in the
  same file showing up multiple times in the result set. By default,
  this implementation prevents duplicates in the result set. Set this
  flag to disable that behavior.
* `nonull` Set to never return an empty set, instead returning a set
  containing the pattern itself. This is the default in glob(3).
* `debug` Set to enable debug logging in minimatch and glob.
* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets.
* `noglobstar` Do not match `**` against multiple filenames. (Ie,
  treat it as a normal `*` instead.)
* `noext` Do not match `+(a|b)` "extglob" patterns.
* `nocase` Perform a case-insensitive match. Note: on
  case-insensitive filesystems, non-magic patterns will match by
  default, since `stat` and `readdir` will not raise errors.
* `matchBase` Perform a basename-only match if the pattern does not
  contain any slash characters. That is, `*.js` would be treated as
  equivalent to `**/*.js`, matching all js files in all directories.
* `nodir` Do not match directories, only files. (Note: to match
  *only* directories, simply put a `/` at the end of the pattern.)
* `ignore` Add a pattern or an array of glob patterns to exclude matches.
  Note: `ignore` patterns are *always* in `dot:true` mode, regardless
  of any other settings. (Several of these options are combined in the
  sketch after this list.)
* `follow` Follow symlinked directories when expanding `**` patterns.
  Note that this can result in a lot of duplicate references in the
  presence of cyclic links.
* `realpath` Set to true to call `fs.realpath` on all of the results.
  In the case of a symlink that cannot be resolved, the full absolute
  path to the matched entry is returned (though it will usually be a
  broken symlink)
* `absolute` Set to true to always receive absolute paths for matched
  files. Unlike `realpath`, this also affects the values returned in
  the `match` event.
* `fs` File-system object with Node's `fs` API. By default, the built-in
  `fs` module will be used. Set to a volume provided by a library like
  `memfs` to avoid using the "real" file-system.
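
As a brief, hedged illustration of several of these options together
(the pattern, `cwd`, and ignore list are hypothetical):

```javascript
const glob = require('glob')

glob('**/*.js', {
  cwd: '/some/project',        // search relative to this directory
  ignore: ['node_modules/**'], // always matched in dot:true mode
  nodir: true,                 // only files, never directories
  absolute: true,              // return absolute paths
}, (er, files) => {
  if (er) throw er
  console.log(files)
})
```
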
## Comparisons to other fnmatch/glob implementations

While strict compliance with the existing standards is a worthwhile
goal, some discrepancies exist between node-glob and other
implementations, and are intentional.

The double-star character `**` is supported by default, unless the
`noglobstar` flag is set. This is supported in the manner of bsdglob
and bash 4.3, where `**` only has special significance if it is the only
thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
`a/**b` will not.
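
For instance, given a hypothetical tree containing the files `a/b`,
`a/x/y/b`, and `a/xb`:

```javascript
const glob = require('glob')

glob.sync('a/**/b')  // => ['a/b', 'a/x/y/b']  (`**` spans directories)
glob.sync('a/**b')   // => ['a/b', 'a/xb']     (`**` acts like a plain `*`)
```
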
Note that symlinked directories are not crawled as part of a `**`,
though their contents may match against subsequent portions of the
pattern. This prevents infinite loops and duplicates and the like.

If an escaped pattern has no matches, and the `nonull` flag is set,
then glob returns the pattern as-provided, rather than
interpreting the character escapes. For example,
`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
`"*a?"`. This is akin to setting the `nullglob` option in bash, except
that it does not resolve escaped pattern characters.

If brace expansion is not disabled, then it is performed before any
other interpretation of the glob pattern. Thus, a pattern like
`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
checked for validity. Since those two are valid, matching proceeds.
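
To see the expansion step in isolation, minimatch (the matching engine
glob uses) exposes its brace expansion; a small sketch with an
illustrative pattern:

```javascript
const minimatch = require('minimatch')

// Expansion happens before any other interpretation of the pattern.
console.log(minimatch.braceExpand('a/{x,y}/*.js'))
// => ['a/x/*.js', 'a/y/*.js']
```
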
### Comments and Negation

Previously, this module let you mark a pattern as a "comment" if it
started with a `#` character, or a "negated" pattern if it started
with a `!` character.

These options were deprecated in version 5, and removed in version 6.

To specify things that should not match, use the `ignore` option.

## Windows

**Please only use forward-slashes in glob expressions.**

Though windows uses either `/` or `\` as its path separator, only `/`
characters are used by this glob implementation. You must use
forward-slashes **only** in glob expressions. Back-slashes will always
be interpreted as escape characters, not path separators.

Results from absolute patterns such as `/foo/*` are mounted onto the
root setting using `path.join`. On windows, this will by default result
in `/foo/*` matching `C:\foo\bar.txt`.

To automatically coerce all `\` characters to `/` in pattern
strings, **thus making it impossible to escape literal glob
characters**, you may set the `windowsPathsNoEscape` option to
`true`.
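
A sketch of that option in use (the drive and directory names are
made up):

```javascript
const path = require('path')
const glob = require('glob')

// path.join produces back-slashes on Windows, which would otherwise
// be read as escape characters in the pattern.
const pattern = path.join('C:', 'projects', 'app', '**', '*.js')

glob(pattern, { windowsPathsNoEscape: true }, (er, files) => {
  if (er) throw er
  console.log(files)
})
```
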
## Race Conditions

Glob searching, by its very nature, is susceptible to race conditions,
since it relies on directory walking and such.

As a result, it is possible that a file that exists when glob looks for
it may have been deleted or modified by the time it returns the result.

As part of its internal implementation, this program caches all stat
and readdir calls that it makes, in order to cut down on system
overhead. However, this also makes it even more susceptible to races,
especially if the cache or statCache objects are reused between glob
calls.

Users are thus advised not to use a glob result as a guarantee of
filesystem state in the face of rapid changes. For the vast majority
of operations, this is never a problem.

## Glob Logo
Glob's logo was created by [Tanya Brassie](http://tanyabrassie.com/). Logo files can be found [here](https://github.com/isaacs/node-glob/tree/master/logo).

The logo is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/).

## Contributing

Any change to behavior (including bugfixes) must come with a test.

Patches that fail tests or reduce performance will be rejected.

```
# to run tests
npm test

# to re-generate test fixtures
npm run test-regen

# to benchmark against bash/zsh
npm run bench

# to profile javascript
npm run prof
```

![](oss-usage.png)
244
desktop-operator/node_modules/cacache/node_modules/glob/common.js
generated
vendored
Normal file
@@ -0,0 +1,244 @@
exports.setopts = setopts
exports.ownProp = ownProp
exports.makeAbs = makeAbs
exports.finish = finish
exports.mark = mark
exports.isIgnored = isIgnored
exports.childrenIgnored = childrenIgnored

function ownProp (obj, field) {
  return Object.prototype.hasOwnProperty.call(obj, field)
}

var fs = require("fs")
var path = require("path")
var minimatch = require("minimatch")
var isAbsolute = require("path").isAbsolute
var Minimatch = minimatch.Minimatch

function alphasort (a, b) {
  return a.localeCompare(b, 'en')
}

function setupIgnores (self, options) {
  self.ignore = options.ignore || []

  if (!Array.isArray(self.ignore))
    self.ignore = [self.ignore]

  if (self.ignore.length) {
    self.ignore = self.ignore.map(ignoreMap)
  }
}

// ignore patterns are always in dot:true mode.
function ignoreMap (pattern) {
  var gmatcher = null
  if (pattern.slice(-3) === '/**') {
    var gpattern = pattern.replace(/(\/\*\*)+$/, '')
    gmatcher = new Minimatch(gpattern, { dot: true })
  }

  return {
    matcher: new Minimatch(pattern, { dot: true }),
    gmatcher: gmatcher
  }
}

function setopts (self, pattern, options) {
  if (!options)
    options = {}

  // base-matching: just use globstar for that.
  if (options.matchBase && -1 === pattern.indexOf("/")) {
    if (options.noglobstar) {
      throw new Error("base matching requires globstar")
    }
    pattern = "**/" + pattern
  }

  self.windowsPathsNoEscape = !!options.windowsPathsNoEscape ||
    options.allowWindowsEscape === false
  if (self.windowsPathsNoEscape) {
    pattern = pattern.replace(/\\/g, '/')
  }

  self.silent = !!options.silent
  self.pattern = pattern
  self.strict = options.strict !== false
  self.realpath = !!options.realpath
  self.realpathCache = options.realpathCache || Object.create(null)
  self.follow = !!options.follow
  self.dot = !!options.dot
  self.mark = !!options.mark
  self.nodir = !!options.nodir
  if (self.nodir)
    self.mark = true
  self.sync = !!options.sync
  self.nounique = !!options.nounique
  self.nonull = !!options.nonull
  self.nosort = !!options.nosort
  self.nocase = !!options.nocase
  self.stat = !!options.stat
  self.noprocess = !!options.noprocess
  self.absolute = !!options.absolute
  self.fs = options.fs || fs

  self.maxLength = options.maxLength || Infinity
  self.cache = options.cache || Object.create(null)
  self.statCache = options.statCache || Object.create(null)
  self.symlinks = options.symlinks || Object.create(null)

  setupIgnores(self, options)

  self.changedCwd = false
  var cwd = process.cwd()
  if (!ownProp(options, "cwd"))
    self.cwd = path.resolve(cwd)
  else {
    self.cwd = path.resolve(options.cwd)
    self.changedCwd = self.cwd !== cwd
  }

  self.root = options.root || path.resolve(self.cwd, "/")
  self.root = path.resolve(self.root)

  // TODO: is an absolute `cwd` supposed to be resolved against `root`?
  // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
  self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd)
  self.nomount = !!options.nomount

  if (process.platform === "win32") {
    self.root = self.root.replace(/\\/g, "/")
    self.cwd = self.cwd.replace(/\\/g, "/")
    self.cwdAbs = self.cwdAbs.replace(/\\/g, "/")
  }

  // disable comments and negation in Minimatch.
  // Note that they are not supported in Glob itself anyway.
  options.nonegate = true
  options.nocomment = true

  self.minimatch = new Minimatch(pattern, options)
  self.options = self.minimatch.options
}

function finish (self) {
  var nou = self.nounique
  var all = nou ? [] : Object.create(null)

  for (var i = 0, l = self.matches.length; i < l; i ++) {
    var matches = self.matches[i]
    if (!matches || Object.keys(matches).length === 0) {
      if (self.nonull) {
        // do like the shell, and spit out the literal glob
        var literal = self.minimatch.globSet[i]
        if (nou)
          all.push(literal)
        else
          all[literal] = true
      }
    } else {
      // had matches
      var m = Object.keys(matches)
      if (nou)
        all.push.apply(all, m)
      else
        m.forEach(function (m) {
          all[m] = true
        })
    }
  }

  if (!nou)
    all = Object.keys(all)

  if (!self.nosort)
    all = all.sort(alphasort)

  // at *some* point we statted all of these
  if (self.mark) {
    for (var i = 0; i < all.length; i++) {
      all[i] = self._mark(all[i])
    }
    if (self.nodir) {
      all = all.filter(function (e) {
        var notDir = !(/\/$/.test(e))
        var c = self.cache[e] || self.cache[makeAbs(self, e)]
        if (notDir && c)
          notDir = c !== 'DIR' && !Array.isArray(c)
        return notDir
      })
    }
  }

  if (self.ignore.length)
    all = all.filter(function(m) {
      return !isIgnored(self, m)
    })

  self.found = all
}

function mark (self, p) {
  var abs = makeAbs(self, p)
  var c = self.cache[abs]
  var m = p
  if (c) {
    var isDir = c === 'DIR' || Array.isArray(c)
    var slash = p.slice(-1) === '/'

    if (isDir && !slash)
      m += '/'
    else if (!isDir && slash)
      m = m.slice(0, -1)

    if (m !== p) {
      var mabs = makeAbs(self, m)
      self.statCache[mabs] = self.statCache[abs]
      self.cache[mabs] = self.cache[abs]
    }
  }

  return m
}

// lotta situps...
function makeAbs (self, f) {
  var abs = f
  if (f.charAt(0) === '/') {
    abs = path.join(self.root, f)
  } else if (isAbsolute(f) || f === '') {
    abs = f
  } else if (self.changedCwd) {
    abs = path.resolve(self.cwd, f)
  } else {
    abs = path.resolve(f)
  }

  if (process.platform === 'win32')
    abs = abs.replace(/\\/g, '/')

  return abs
}


// Return true, if pattern ends with globstar '**', for the accompanying parent directory.
// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents
function isIgnored (self, path) {
  if (!self.ignore.length)
    return false

  return self.ignore.some(function(item) {
    return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))
  })
}

function childrenIgnored (self, path) {
  if (!self.ignore.length)
    return false

  return self.ignore.some(function(item) {
    return !!(item.gmatcher && item.gmatcher.match(path))
  })
}
790
desktop-operator/node_modules/cacache/node_modules/glob/glob.js
generated
vendored
Normal file
@@ -0,0 +1,790 @@
// Approach:
//
// 1. Get the minimatch set
// 2. For each pattern in the set, PROCESS(pattern, false)
// 3. Store matches per-set, then uniq them
//
// PROCESS(pattern, inGlobStar)
// Get the first [n] items from pattern that are all strings
// Join these together.  This is PREFIX.
//   If there is no more remaining, then stat(PREFIX) and
//   add to matches if it succeeds.  END.
//
// If inGlobStar and PREFIX is symlink and points to dir
//   set ENTRIES = []
// else readdir(PREFIX) as ENTRIES
//   If fail, END
//
// with ENTRIES
//   If pattern[n] is GLOBSTAR
//     // handle the case where the globstar match is empty
//     // by pruning it out, and testing the resulting pattern
//     PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
//     // handle other cases.
//     for ENTRY in ENTRIES (not dotfiles)
//       // attach globstar + tail onto the entry
//       // Mark that this entry is a globstar match
//       PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
//
//   else // not globstar
//     for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
//       Test ENTRY against pattern[n]
//       If fails, continue
//       If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
//
// Caveat:
//   Cache all stats and readdirs results to minimize syscall.  Since all
//   we ever care about is existence and directory-ness, we can just keep
//   `true` for files, and [children,...] for directories, or `false` for
//   things that don't exist.
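//
// Illustrative cache shapes (the paths here are hypothetical, added
// for clarity; not part of the upstream comment):
//   cache['/a/b.txt'] = true        // a file
//   cache['/a']       = ['b.txt']   // a directory, with its children
//   cache['/nope']    = false       // known not to exist
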
module.exports = glob

var rp = require('fs.realpath')
var minimatch = require('minimatch')
var Minimatch = minimatch.Minimatch
var inherits = require('inherits')
var EE = require('events').EventEmitter
var path = require('path')
var assert = require('assert')
var isAbsolute = require('path').isAbsolute
var globSync = require('./sync.js')
var common = require('./common.js')
var setopts = common.setopts
var ownProp = common.ownProp
var inflight = require('inflight')
var util = require('util')
var childrenIgnored = common.childrenIgnored
var isIgnored = common.isIgnored

var once = require('once')

function glob (pattern, options, cb) {
  if (typeof options === 'function') cb = options, options = {}
  if (!options) options = {}

  if (options.sync) {
    if (cb)
      throw new TypeError('callback provided to sync glob')
    return globSync(pattern, options)
  }

  return new Glob(pattern, options, cb)
}

glob.sync = globSync
var GlobSync = glob.GlobSync = globSync.GlobSync

// old api surface
glob.glob = glob

function extend (origin, add) {
  if (add === null || typeof add !== 'object') {
    return origin
  }

  var keys = Object.keys(add)
  var i = keys.length
  while (i--) {
    origin[keys[i]] = add[keys[i]]
  }
  return origin
}

glob.hasMagic = function (pattern, options_) {
  var options = extend({}, options_)
  options.noprocess = true

  var g = new Glob(pattern, options)
  var set = g.minimatch.set

  if (!pattern)
    return false

  if (set.length > 1)
    return true

  for (var j = 0; j < set[0].length; j++) {
    if (typeof set[0][j] !== 'string')
      return true
  }

  return false
}

glob.Glob = Glob
inherits(Glob, EE)
function Glob (pattern, options, cb) {
  if (typeof options === 'function') {
    cb = options
    options = null
  }

  if (options && options.sync) {
    if (cb)
      throw new TypeError('callback provided to sync glob')
    return new GlobSync(pattern, options)
  }

  if (!(this instanceof Glob))
    return new Glob(pattern, options, cb)

  setopts(this, pattern, options)
  this._didRealPath = false

  // process each pattern in the minimatch set
  var n = this.minimatch.set.length

  // The matches are stored as {<filename>: true,...} so that
  // duplicates are automagically pruned.
  // Later, we do an Object.keys() on these.
  // Keep them as a list so we can fill in when nonull is set.
  this.matches = new Array(n)

  if (typeof cb === 'function') {
    cb = once(cb)
    this.on('error', cb)
    this.on('end', function (matches) {
      cb(null, matches)
    })
  }

  var self = this
  this._processing = 0

  this._emitQueue = []
  this._processQueue = []
  this.paused = false

  if (this.noprocess)
    return this

  if (n === 0)
    return done()

  var sync = true
  for (var i = 0; i < n; i ++) {
    this._process(this.minimatch.set[i], i, false, done)
  }
  sync = false

  function done () {
    --self._processing
    if (self._processing <= 0) {
      if (sync) {
        process.nextTick(function () {
          self._finish()
        })
      } else {
        self._finish()
      }
    }
  }
}

Glob.prototype._finish = function () {
  assert(this instanceof Glob)
  if (this.aborted)
    return

  if (this.realpath && !this._didRealpath)
    return this._realpath()

  common.finish(this)
  this.emit('end', this.found)
}

Glob.prototype._realpath = function () {
  if (this._didRealpath)
    return

  this._didRealpath = true

  var n = this.matches.length
  if (n === 0)
    return this._finish()

  var self = this
  for (var i = 0; i < this.matches.length; i++)
    this._realpathSet(i, next)

  function next () {
    if (--n === 0)
      self._finish()
  }
}

Glob.prototype._realpathSet = function (index, cb) {
  var matchset = this.matches[index]
  if (!matchset)
    return cb()

  var found = Object.keys(matchset)
  var self = this
  var n = found.length

  if (n === 0)
    return cb()

  var set = this.matches[index] = Object.create(null)
  found.forEach(function (p, i) {
    // If there's a problem with the stat, then it means that
    // one or more of the links in the realpath couldn't be
    // resolved.  just return the abs value in that case.
    p = self._makeAbs(p)
    rp.realpath(p, self.realpathCache, function (er, real) {
      if (!er)
        set[real] = true
      else if (er.syscall === 'stat')
        set[p] = true
      else
        self.emit('error', er) // srsly wtf right here

      if (--n === 0) {
        self.matches[index] = set
        cb()
      }
    })
  })
}

Glob.prototype._mark = function (p) {
  return common.mark(this, p)
}

Glob.prototype._makeAbs = function (f) {
  return common.makeAbs(this, f)
}

Glob.prototype.abort = function () {
  this.aborted = true
  this.emit('abort')
}

Glob.prototype.pause = function () {
  if (!this.paused) {
    this.paused = true
    this.emit('pause')
  }
}

Glob.prototype.resume = function () {
  if (this.paused) {
    this.emit('resume')
    this.paused = false
    if (this._emitQueue.length) {
      var eq = this._emitQueue.slice(0)
      this._emitQueue.length = 0
      for (var i = 0; i < eq.length; i ++) {
        var e = eq[i]
        this._emitMatch(e[0], e[1])
      }
    }
    if (this._processQueue.length) {
      var pq = this._processQueue.slice(0)
      this._processQueue.length = 0
      for (var i = 0; i < pq.length; i ++) {
        var p = pq[i]
        this._processing--
        this._process(p[0], p[1], p[2], p[3])
      }
    }
  }
}

Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
  assert(this instanceof Glob)
  assert(typeof cb === 'function')

  if (this.aborted)
    return

  this._processing++
  if (this.paused) {
    this._processQueue.push([pattern, index, inGlobStar, cb])
    return
  }

  //console.error('PROCESS %d', this._processing, pattern)

  // Get the first [n] parts of pattern that are all strings.
  var n = 0
  while (typeof pattern[n] === 'string') {
    n ++
  }
  // now n is the index of the first one that is *not* a string.

  // see if there's anything else
  var prefix
  switch (n) {
    // if not, then this is rather simple
    case pattern.length:
      this._processSimple(pattern.join('/'), index, cb)
      return

    case 0:
      // pattern *starts* with some non-trivial item.
      // going to readdir(cwd), but not include the prefix in matches.
      prefix = null
      break

    default:
      // pattern has some string bits in the front.
      // whatever it starts with, whether that's 'absolute' like /foo/bar,
      // or 'relative' like '../baz'
      prefix = pattern.slice(0, n).join('/')
      break
  }

  var remain = pattern.slice(n)

  // get the list of entries.
  var read
  if (prefix === null)
    read = '.'
  else if (isAbsolute(prefix) ||
    isAbsolute(pattern.map(function (p) {
      return typeof p === 'string' ? p : '[*]'
    }).join('/'))) {
    if (!prefix || !isAbsolute(prefix))
      prefix = '/' + prefix
    read = prefix
  } else
    read = prefix

  var abs = this._makeAbs(read)

  //if ignored, skip _processing
  if (childrenIgnored(this, read))
    return cb()

  var isGlobStar = remain[0] === minimatch.GLOBSTAR
  if (isGlobStar)
    this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
  else
    this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
}

Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
  var self = this
  this._readdir(abs, inGlobStar, function (er, entries) {
    return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
  })
}

Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {

  // if the abs isn't a dir, then nothing can match!
  if (!entries)
    return cb()

  // It will only match dot entries if it starts with a dot, or if
  // dot is set.  Stuff like @(.foo|.bar) isn't allowed.
  var pn = remain[0]
  var negate = !!this.minimatch.negate
  var rawGlob = pn._glob
  var dotOk = this.dot || rawGlob.charAt(0) === '.'

  var matchedEntries = []
  for (var i = 0; i < entries.length; i++) {
    var e = entries[i]
    if (e.charAt(0) !== '.' || dotOk) {
      var m
      if (negate && !prefix) {
        m = !e.match(pn)
      } else {
        m = e.match(pn)
      }
      if (m)
        matchedEntries.push(e)
    }
  }

  //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)

  var len = matchedEntries.length
  // If there are no matched entries, then nothing matches.
  if (len === 0)
    return cb()

  // if this is the last remaining pattern bit, then no need for
  // an additional stat *unless* the user has specified mark or
  // stat explicitly.  We know they exist, since readdir returned
  // them.

  if (remain.length === 1 && !this.mark && !this.stat) {
    if (!this.matches[index])
      this.matches[index] = Object.create(null)

    for (var i = 0; i < len; i ++) {
      var e = matchedEntries[i]
      if (prefix) {
        if (prefix !== '/')
          e = prefix + '/' + e
        else
          e = prefix + e
      }

      if (e.charAt(0) === '/' && !this.nomount) {
        e = path.join(this.root, e)
      }
      this._emitMatch(index, e)
    }
    // This was the last one, and no stats were needed
    return cb()
  }

  // now test all matched entries as stand-ins for that part
  // of the pattern.
  remain.shift()
  for (var i = 0; i < len; i ++) {
    var e = matchedEntries[i]
    var newPattern
    if (prefix) {
      if (prefix !== '/')
        e = prefix + '/' + e
      else
        e = prefix + e
    }
    this._process([e].concat(remain), index, inGlobStar, cb)
  }
  cb()
}

Glob.prototype._emitMatch = function (index, e) {
  if (this.aborted)
    return

  if (isIgnored(this, e))
    return

  if (this.paused) {
    this._emitQueue.push([index, e])
    return
  }

  var abs = isAbsolute(e) ? e : this._makeAbs(e)

  if (this.mark)
    e = this._mark(e)

  if (this.absolute)
    e = abs

  if (this.matches[index][e])
    return

  if (this.nodir) {
    var c = this.cache[abs]
    if (c === 'DIR' || Array.isArray(c))
      return
  }

  this.matches[index][e] = true

  var st = this.statCache[abs]
  if (st)
    this.emit('stat', e, st)

  this.emit('match', e)
}

Glob.prototype._readdirInGlobStar = function (abs, cb) {
  if (this.aborted)
    return

  // follow all symlinked directories forever
  // just proceed as if this is a non-globstar situation
  if (this.follow)
    return this._readdir(abs, false, cb)

  var lstatkey = 'lstat\0' + abs
  var self = this
  var lstatcb = inflight(lstatkey, lstatcb_)

  if (lstatcb)
    self.fs.lstat(abs, lstatcb)

  function lstatcb_ (er, lstat) {
    if (er && er.code === 'ENOENT')
      return cb()

    var isSym = lstat && lstat.isSymbolicLink()
    self.symlinks[abs] = isSym

    // If it's not a symlink or a dir, then it's definitely a regular file.
    // don't bother doing a readdir in that case.
    if (!isSym && lstat && !lstat.isDirectory()) {
      self.cache[abs] = 'FILE'
      cb()
    } else
      self._readdir(abs, false, cb)
  }
}

Glob.prototype._readdir = function (abs, inGlobStar, cb) {
  if (this.aborted)
    return

  cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
  if (!cb)
    return

  //console.error('RD %j %j', +inGlobStar, abs)
  if (inGlobStar && !ownProp(this.symlinks, abs))
    return this._readdirInGlobStar(abs, cb)

  if (ownProp(this.cache, abs)) {
    var c = this.cache[abs]
    if (!c || c === 'FILE')
      return cb()

    if (Array.isArray(c))
      return cb(null, c)
  }

  var self = this
  self.fs.readdir(abs, readdirCb(this, abs, cb))
}

function readdirCb (self, abs, cb) {
  return function (er, entries) {
    if (er)
      self._readdirError(abs, er, cb)
    else
      self._readdirEntries(abs, entries, cb)
  }
}

Glob.prototype._readdirEntries = function (abs, entries, cb) {
  if (this.aborted)
    return

  // if we haven't asked to stat everything, then just
  // assume that everything in there exists, so we can avoid
  // having to stat it a second time.
  if (!this.mark && !this.stat) {
    for (var i = 0; i < entries.length; i ++) {
      var e = entries[i]
      if (abs === '/')
        e = abs + e
      else
        e = abs + '/' + e
      this.cache[e] = true
    }
  }

  this.cache[abs] = entries
  return cb(null, entries)
}

Glob.prototype._readdirError = function (f, er, cb) {
  if (this.aborted)
    return

  // handle errors, and cache the information
  switch (er.code) {
    case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
    case 'ENOTDIR': // totally normal. means it *does* exist.
      var abs = this._makeAbs(f)
      this.cache[abs] = 'FILE'
      if (abs === this.cwdAbs) {
        var error = new Error(er.code + ' invalid cwd ' + this.cwd)
        error.path = this.cwd
        error.code = er.code
        this.emit('error', error)
        this.abort()
      }
      break

    case 'ENOENT': // not terribly unusual
    case 'ELOOP':
    case 'ENAMETOOLONG':
    case 'UNKNOWN':
      this.cache[this._makeAbs(f)] = false
      break

    default: // some unusual error.  Treat as failure.
      this.cache[this._makeAbs(f)] = false
      if (this.strict) {
        this.emit('error', er)
        // If the error is handled, then we abort
        // if not, we threw out of here
        this.abort()
      }
      if (!this.silent)
        console.error('glob error', er)
      break
  }

  return cb()
}

Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
  var self = this
  this._readdir(abs, inGlobStar, function (er, entries) {
    self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
  })
}


Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
  //console.error('pgs2', prefix, remain[0], entries)

  // no entries means not a dir, so it can never have matches
  // foo.txt/** doesn't match foo.txt
  if (!entries)
    return cb()

  // test without the globstar, and with every child both below
  // and replacing the globstar.
  var remainWithoutGlobStar = remain.slice(1)
  var gspref = prefix ? [ prefix ] : []
  var noGlobStar = gspref.concat(remainWithoutGlobStar)

  // the noGlobStar pattern exits the inGlobStar state
  this._process(noGlobStar, index, false, cb)

  var isSym = this.symlinks[abs]
  var len = entries.length

  // If it's a symlink, and we're in a globstar, then stop
  if (isSym && inGlobStar)
    return cb()

  for (var i = 0; i < len; i++) {
    var e = entries[i]
    if (e.charAt(0) === '.' && !this.dot)
      continue

    // these two cases enter the inGlobStar state
    var instead = gspref.concat(entries[i], remainWithoutGlobStar)
    this._process(instead, index, true, cb)

    var below = gspref.concat(entries[i], remain)
    this._process(below, index, true, cb)
  }

  cb()
}

Glob.prototype._processSimple = function (prefix, index, cb) {
  // XXX review this.  Shouldn't it be doing the mounting etc
  // before doing stat?  kinda weird?
  var self = this
  this._stat(prefix, function (er, exists) {
    self._processSimple2(prefix, index, er, exists, cb)
  })
}
Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {

  //console.error('ps2', prefix, exists)

  if (!this.matches[index])
    this.matches[index] = Object.create(null)

  // If it doesn't exist, then just mark the lack of results
  if (!exists)
    return cb()

  if (prefix && isAbsolute(prefix) && !this.nomount) {
    var trail = /[\/\\]$/.test(prefix)
    if (prefix.charAt(0) === '/') {
      prefix = path.join(this.root, prefix)
    } else {
      prefix = path.resolve(this.root, prefix)
      if (trail)
        prefix += '/'
    }
  }

  if (process.platform === 'win32')
    prefix = prefix.replace(/\\/g, '/')

  // Mark this as a match
  this._emitMatch(index, prefix)
  cb()
}

// Returns either 'DIR', 'FILE', or false
Glob.prototype._stat = function (f, cb) {
  var abs = this._makeAbs(f)
  var needDir = f.slice(-1) === '/'

  if (f.length > this.maxLength)
    return cb()

  if (!this.stat && ownProp(this.cache, abs)) {
    var c = this.cache[abs]

    if (Array.isArray(c))
      c = 'DIR'

    // It exists, but maybe not how we need it
    if (!needDir || c === 'DIR')
      return cb(null, c)

    if (needDir && c === 'FILE')
      return cb()

    // otherwise we have to stat, because maybe c=true
    // if we know it exists, but not what it is.
  }

  var exists
  var stat = this.statCache[abs]
  if (stat !== undefined) {
    if (stat === false)
      return cb(null, stat)
    else {
      var type = stat.isDirectory() ? 'DIR' : 'FILE'
      if (needDir && type === 'FILE')
        return cb()
      else
        return cb(null, type, stat)
    }
  }

  var self = this
  var statcb = inflight('stat\0' + abs, lstatcb_)
  if (statcb)
    self.fs.lstat(abs, statcb)

  function lstatcb_ (er, lstat) {
    if (lstat && lstat.isSymbolicLink()) {
      // If it's a symlink, then treat it as the target, unless
      // the target does not exist, then treat it as a file.
      return self.fs.stat(abs, function (er, stat) {
        if (er)
          self._stat2(f, abs, null, lstat, cb)
        else
          self._stat2(f, abs, er, stat, cb)
      })
    } else {
      self._stat2(f, abs, er, lstat, cb)
    }
  }
}

Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
  if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
    this.statCache[abs] = false
    return cb()
  }

  var needDir = f.slice(-1) === '/'
  this.statCache[abs] = stat

  if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
    return cb(null, false, stat)

  var c = true
  if (stat)
    c = stat.isDirectory() ? 'DIR' : 'FILE'
  this.cache[abs] = this.cache[abs] || c

  if (needDir && c === 'FILE')
    return cb()

  return cb(null, c, stat)
}
55
desktop-operator/node_modules/cacache/node_modules/glob/package.json
generated
vendored
Normal file
@@ -0,0 +1,55 @@
{
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
  "name": "glob",
  "description": "a little globber",
  "version": "8.1.0",
  "repository": {
    "type": "git",
    "url": "git://github.com/isaacs/node-glob.git"
  },
  "main": "glob.js",
  "files": [
    "glob.js",
    "sync.js",
    "common.js"
  ],
  "engines": {
    "node": ">=12"
  },
  "dependencies": {
    "fs.realpath": "^1.0.0",
    "inflight": "^1.0.4",
    "inherits": "2",
    "minimatch": "^5.0.1",
    "once": "^1.3.0"
  },
  "devDependencies": {
    "memfs": "^3.2.0",
    "mkdirp": "0",
    "rimraf": "^2.2.8",
    "tap": "^16.0.1",
    "tick": "0.0.6"
  },
  "tap": {
    "before": "test/00-setup.js",
    "after": "test/zz-cleanup.js",
    "statements": 90,
    "branches": 90,
    "functions": 90,
    "lines": 90,
    "jobs": 1
  },
  "scripts": {
    "prepublish": "npm run benchclean",
    "profclean": "rm -f v8.log profile.txt",
    "test": "tap",
    "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js",
    "bench": "bash benchmark.sh",
    "prof": "bash prof.sh && cat profile.txt",
    "benchclean": "node benchclean.js"
  },
  "license": "ISC",
  "funding": {
    "url": "https://github.com/sponsors/isaacs"
  }
}
486
desktop-operator/node_modules/cacache/node_modules/glob/sync.js
generated
vendored
Normal file
@@ -0,0 +1,486 @@
module.exports = globSync
globSync.GlobSync = GlobSync

var rp = require('fs.realpath')
var minimatch = require('minimatch')
var Minimatch = minimatch.Minimatch
var Glob = require('./glob.js').Glob
var util = require('util')
var path = require('path')
var assert = require('assert')
var isAbsolute = require('path').isAbsolute
var common = require('./common.js')
var setopts = common.setopts
var ownProp = common.ownProp
var childrenIgnored = common.childrenIgnored
var isIgnored = common.isIgnored

function globSync (pattern, options) {
  if (typeof options === 'function' || arguments.length === 3)
    throw new TypeError('callback provided to sync glob\n'+
                        'See: https://github.com/isaacs/node-glob/issues/167')

  return new GlobSync(pattern, options).found
}

function GlobSync (pattern, options) {
  if (!pattern)
    throw new Error('must provide pattern')

  if (typeof options === 'function' || arguments.length === 3)
    throw new TypeError('callback provided to sync glob\n'+
                        'See: https://github.com/isaacs/node-glob/issues/167')

  if (!(this instanceof GlobSync))
    return new GlobSync(pattern, options)

  setopts(this, pattern, options)

  if (this.noprocess)
    return this

  var n = this.minimatch.set.length
  this.matches = new Array(n)
  for (var i = 0; i < n; i ++) {
    this._process(this.minimatch.set[i], i, false)
  }
  this._finish()
}

GlobSync.prototype._finish = function () {
  assert.ok(this instanceof GlobSync)
  if (this.realpath) {
    var self = this
    this.matches.forEach(function (matchset, index) {
      var set = self.matches[index] = Object.create(null)
      for (var p in matchset) {
        try {
          p = self._makeAbs(p)
          var real = rp.realpathSync(p, self.realpathCache)
          set[real] = true
        } catch (er) {
          if (er.syscall === 'stat')
            set[self._makeAbs(p)] = true
          else
            throw er
        }
      }
    })
  }
  common.finish(this)
}


GlobSync.prototype._process = function (pattern, index, inGlobStar) {
  assert.ok(this instanceof GlobSync)

  // Get the first [n] parts of pattern that are all strings.
  var n = 0
  while (typeof pattern[n] === 'string') {
    n ++
  }
  // now n is the index of the first one that is *not* a string.

  // See if there's anything else
  var prefix
  switch (n) {
    // if not, then this is rather simple
    case pattern.length:
      this._processSimple(pattern.join('/'), index)
      return

    case 0:
      // pattern *starts* with some non-trivial item.
      // going to readdir(cwd), but not include the prefix in matches.
      prefix = null
      break

    default:
      // pattern has some string bits in the front.
      // whatever it starts with, whether that's 'absolute' like /foo/bar,
      // or 'relative' like '../baz'
      prefix = pattern.slice(0, n).join('/')
      break
  }

  var remain = pattern.slice(n)

  // get the list of entries.
  var read
  if (prefix === null)
    read = '.'
  else if (isAbsolute(prefix) ||
    isAbsolute(pattern.map(function (p) {
      return typeof p === 'string' ? p : '[*]'
    }).join('/'))) {
    if (!prefix || !isAbsolute(prefix))
      prefix = '/' + prefix
    read = prefix
  } else
    read = prefix

  var abs = this._makeAbs(read)

  //if ignored, skip processing
  if (childrenIgnored(this, read))
    return

  var isGlobStar = remain[0] === minimatch.GLOBSTAR
  if (isGlobStar)
    this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)
  else
    this._processReaddir(prefix, read, abs, remain, index, inGlobStar)
}


GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
  var entries = this._readdir(abs, inGlobStar)

  // if the abs isn't a dir, then nothing can match!
  if (!entries)
    return

  // It will only match dot entries if it starts with a dot, or if
  // dot is set.  Stuff like @(.foo|.bar) isn't allowed.
  var pn = remain[0]
  var negate = !!this.minimatch.negate
  var rawGlob = pn._glob
  var dotOk = this.dot || rawGlob.charAt(0) === '.'

  var matchedEntries = []
  for (var i = 0; i < entries.length; i++) {
    var e = entries[i]
    if (e.charAt(0) !== '.' || dotOk) {
      var m
      if (negate && !prefix) {
        m = !e.match(pn)
      } else {
        m = e.match(pn)
      }
      if (m)
        matchedEntries.push(e)
    }
  }

  var len = matchedEntries.length
  // If there are no matched entries, then nothing matches.
  if (len === 0)
    return

  // if this is the last remaining pattern bit, then no need for
  // an additional stat *unless* the user has specified mark or
  // stat explicitly.  We know they exist, since readdir returned
  // them.

  if (remain.length === 1 && !this.mark && !this.stat) {
    if (!this.matches[index])
      this.matches[index] = Object.create(null)

    for (var i = 0; i < len; i ++) {
      var e = matchedEntries[i]
      if (prefix) {
        if (prefix.slice(-1) !== '/')
          e = prefix + '/' + e
        else
          e = prefix + e
      }

      if (e.charAt(0) === '/' && !this.nomount) {
        e = path.join(this.root, e)
      }
      this._emitMatch(index, e)
    }
    // This was the last one, and no stats were needed
    return
  }

  // now test all matched entries as stand-ins for that part
  // of the pattern.
  remain.shift()
  for (var i = 0; i < len; i ++) {
    var e = matchedEntries[i]
    var newPattern
    if (prefix)
      newPattern = [prefix, e]
    else
      newPattern = [e]
    this._process(newPattern.concat(remain), index, inGlobStar)
  }
}


GlobSync.prototype._emitMatch = function (index, e) {
  if (isIgnored(this, e))
    return

  var abs = this._makeAbs(e)

  if (this.mark)
    e = this._mark(e)

  if (this.absolute) {
    e = abs
  }

  if (this.matches[index][e])
    return

  if (this.nodir) {
    var c = this.cache[abs]
    if (c === 'DIR' || Array.isArray(c))
      return
  }

  this.matches[index][e] = true

  if (this.stat)
    this._stat(e)
}


GlobSync.prototype._readdirInGlobStar = function (abs) {
  // follow all symlinked directories forever
  // just proceed as if this is a non-globstar situation
  if (this.follow)
    return this._readdir(abs, false)

  var entries
  var lstat
  var stat
  try {
    lstat = this.fs.lstatSync(abs)
  } catch (er) {
    if (er.code === 'ENOENT') {
      // lstat failed, doesn't exist
      return null
    }
  }

  var isSym = lstat && lstat.isSymbolicLink()
  this.symlinks[abs] = isSym

  // If it's not a symlink or a dir, then it's definitely a regular file.
  // don't bother doing a readdir in that case.
  if (!isSym && lstat && !lstat.isDirectory())
    this.cache[abs] = 'FILE'
  else
    entries = this._readdir(abs, false)

  return entries
}

GlobSync.prototype._readdir = function (abs, inGlobStar) {
  var entries

  if (inGlobStar && !ownProp(this.symlinks, abs))
    return this._readdirInGlobStar(abs)

  if (ownProp(this.cache, abs)) {
    var c = this.cache[abs]
    if (!c || c === 'FILE')
      return null

    if (Array.isArray(c))
      return c
  }

  try {
    return this._readdirEntries(abs, this.fs.readdirSync(abs))
  } catch (er) {
    this._readdirError(abs, er)
    return null
  }
}

GlobSync.prototype._readdirEntries = function (abs, entries) {
  // if we haven't asked to stat everything, then just
  // assume that everything in there exists, so we can avoid
  // having to stat it a second time.
  if (!this.mark && !this.stat) {
    for (var i = 0; i < entries.length; i ++) {
      var e = entries[i]
      if (abs === '/')
        e = abs + e
      else
        e = abs + '/' + e
      this.cache[e] = true
    }
  }

  this.cache[abs] = entries

  // mark and cache dir-ness
  return entries
}

GlobSync.prototype._readdirError = function (f, er) {
  // handle errors, and cache the information
  switch (er.code) {
    case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
    case 'ENOTDIR': // totally normal. means it *does* exist.
      var abs = this._makeAbs(f)
      this.cache[abs] = 'FILE'
      if (abs === this.cwdAbs) {
        var error = new Error(er.code + ' invalid cwd ' + this.cwd)
        error.path = this.cwd
        error.code = er.code
        throw error
      }
      break

    case 'ENOENT': // not terribly unusual
    case 'ELOOP':
    case 'ENAMETOOLONG':
    case 'UNKNOWN':
      this.cache[this._makeAbs(f)] = false
      break

    default: // some unusual error.  Treat as failure.
      this.cache[this._makeAbs(f)] = false
      if (this.strict)
        throw er
      if (!this.silent)
        console.error('glob error', er)
      break
  }
}

GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {

  var entries = this._readdir(abs, inGlobStar)

  // no entries means not a dir, so it can never have matches
  // foo.txt/** doesn't match foo.txt
  if (!entries)
    return

  // test without the globstar, and with every child both below
  // and replacing the globstar.
  var remainWithoutGlobStar = remain.slice(1)
  var gspref = prefix ? [ prefix ] : []
  var noGlobStar = gspref.concat(remainWithoutGlobStar)

  // the noGlobStar pattern exits the inGlobStar state
  this._process(noGlobStar, index, false)

  var len = entries.length
  var isSym = this.symlinks[abs]

  // If it's a symlink, and we're in a globstar, then stop
  if (isSym && inGlobStar)
    return

  for (var i = 0; i < len; i++) {
    var e = entries[i]
    if (e.charAt(0) === '.' && !this.dot)
      continue

    // these two cases enter the inGlobStar state
    var instead = gspref.concat(entries[i], remainWithoutGlobStar)
    this._process(instead, index, true)

    var below = gspref.concat(entries[i], remain)
    this._process(below, index, true)
  }
}

GlobSync.prototype._processSimple = function (prefix, index) {
  // XXX review this.  Shouldn't it be doing the mounting etc
  // before doing stat?  kinda weird?
  var exists = this._stat(prefix)

  if (!this.matches[index])
    this.matches[index] = Object.create(null)

  // If it doesn't exist, then just mark the lack of results
  if (!exists)
    return

  if (prefix && isAbsolute(prefix) && !this.nomount) {
    var trail = /[\/\\]$/.test(prefix)
    if (prefix.charAt(0) === '/') {
      prefix = path.join(this.root, prefix)
    } else {
      prefix = path.resolve(this.root, prefix)
      if (trail)
        prefix += '/'
    }
  }

  if (process.platform === 'win32')
    prefix = prefix.replace(/\\/g, '/')

  // Mark this as a match
  this._emitMatch(index, prefix)
}

// Returns either 'DIR', 'FILE', or false
GlobSync.prototype._stat = function (f) {
  var abs = this._makeAbs(f)
  var needDir = f.slice(-1) === '/'

  if (f.length > this.maxLength)
    return false

  if (!this.stat && ownProp(this.cache, abs)) {
    var c = this.cache[abs]

    if (Array.isArray(c))
      c = 'DIR'

    // It exists, but maybe not how we need it
    if (!needDir || c === 'DIR')
      return c

    if (needDir && c === 'FILE')
      return false

    // otherwise we have to stat, because maybe c=true
    // if we know it exists, but not what it is.
  }

  var exists
  var stat = this.statCache[abs]
  if (!stat) {
    var lstat
    try {
      lstat = this.fs.lstatSync(abs)
    } catch (er) {
      if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
        this.statCache[abs] = false
        return false
      }
    }

    if (lstat && lstat.isSymbolicLink()) {
      try {
        stat = this.fs.statSync(abs)
      } catch (er) {
        stat = lstat
      }
    } else {
      stat = lstat
    }
  }

  this.statCache[abs] = stat

  var c = true
  if (stat)
    c = stat.isDirectory() ? 'DIR' : 'FILE'

  this.cache[abs] = this.cache[abs] || c

  if (needDir && c === 'FILE')
    return false

  return c
}

GlobSync.prototype._mark = function (p) {
  return common.mark(this, p)
}

GlobSync.prototype._makeAbs = function (f) {
  return common.makeAbs(this, f)
}
15
desktop-operator/node_modules/cacache/node_modules/lru-cache/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,15 @@
The ISC License

Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
1117
desktop-operator/node_modules/cacache/node_modules/lru-cache/README.md
generated
vendored
Normal file
File diff suppressed because it is too large
869
desktop-operator/node_modules/cacache/node_modules/lru-cache/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,869 @@
// Project: https://github.com/isaacs/node-lru-cache
// Based initially on @types/lru-cache
// https://github.com/DefinitelyTyped/DefinitelyTyped
// used under the terms of the MIT License, shown below.
//
// DefinitelyTyped license:
// ------
// MIT License
//
// Copyright (c) Microsoft Corporation.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE
// ------
//
// Changes by Isaac Z. Schlueter released under the terms found in the
// LICENSE file within this project.

/**
 * Integer greater than 0, representing some number of milliseconds, or the
 * time at which a TTL started counting from.
 */
declare type LRUMilliseconds = number

/**
 * An integer greater than 0, reflecting the calculated size of items
 */
declare type LRUSize = number

/**
 * An integer greater than 0, reflecting a number of items
 */
declare type LRUCount = number

declare class LRUCache<K, V> implements Iterable<[K, V]> {
  constructor(options: LRUCache.Options<K, V>)

  /**
   * Number of items in the cache.
   * Alias for {@link size}
   *
   * @deprecated since 7.0 use {@link size} instead
   */
  public readonly length: LRUCount

  public readonly max: LRUCount
  public readonly maxSize: LRUSize
  public readonly maxEntrySize: LRUSize
  public readonly sizeCalculation:
    | LRUCache.SizeCalculator<K, V>
    | undefined
  public readonly dispose: LRUCache.Disposer<K, V>
  /**
   * @since 7.4.0
   */
  public readonly disposeAfter: LRUCache.Disposer<K, V> | null
  public readonly noDisposeOnSet: boolean
  public readonly ttl: LRUMilliseconds
  public readonly ttlResolution: LRUMilliseconds
  public readonly ttlAutopurge: boolean
  public readonly allowStale: boolean
  public readonly updateAgeOnGet: boolean
  /**
   * @since 7.11.0
   */
  public readonly noDeleteOnStaleGet: boolean
  /**
   * @since 7.6.0
   */
  public readonly fetchMethod: LRUCache.Fetcher<K, V> | null

  /**
   * The total number of items held in the cache at the current moment.
   */
  public readonly size: LRUCount

  /**
   * The total size of items in cache when using size tracking.
   */
  public readonly calculatedSize: LRUSize

  /**
   * Add a value to the cache.
   */
  public set(
    key: K,
    value: V,
    options?: LRUCache.SetOptions<K, V>
  ): this

  /**
   * Return a value from the cache. Will update the recency of the cache entry
   * found.
   *
   * If the key is not found, {@link get} will return `undefined`. This can be
   * confusing when setting values specifically to `undefined`, as in
   * `cache.set(key, undefined)`. Use {@link has} to determine whether a key is
   * present in the cache at all.
   */
  public get(key: K, options?: LRUCache.GetOptions<V>): V | undefined
/**
|
||||
* Like {@link get} but doesn't update recency or delete stale items.
|
||||
* Returns `undefined` if the item is stale, unless {@link allowStale} is set
|
||||
* either on the cache or in the options object.
|
||||
*/
|
||||
public peek(key: K, options?: LRUCache.PeekOptions): V | undefined
|
||||
|
||||
/**
|
||||
* Check if a key is in the cache, without updating the recency of use.
|
||||
* Will return false if the item is stale, even though it is technically
|
||||
* in the cache.
|
||||
*
|
||||
* Will not update item age unless {@link updateAgeOnHas} is set in the
|
||||
* options or constructor.
|
||||
*/
|
||||
public has(key: K, options?: LRUCache.HasOptions<V>): boolean
|
||||
|
||||
/**
|
||||
* Deletes a key out of the cache.
|
||||
* Returns true if the key was deleted, false otherwise.
|
||||
*/
|
||||
public delete(key: K): boolean
|
||||
|
||||
/**
|
||||
* Clear the cache entirely, throwing away all values.
|
||||
*/
|
||||
public clear(): void
|
||||
|
||||
/**
|
||||
* Delete any stale entries. Returns true if anything was removed, false
|
||||
* otherwise.
|
||||
*/
|
||||
public purgeStale(): boolean
|
||||
|
||||
/**
|
||||
* Find a value for which the supplied fn method returns a truthy value,
|
||||
* similar to Array.find(). fn is called as fn(value, key, cache).
|
||||
*/
|
||||
public find(
|
||||
callbackFn: (
|
||||
value: V,
|
||||
key: K,
|
||||
cache: this
|
||||
) => boolean | undefined | void,
|
||||
options?: LRUCache.GetOptions<V>
|
||||
): V | undefined
|
||||
|
||||
/**
|
||||
* Call the supplied function on each item in the cache, in order from
|
||||
* most recently used to least recently used. fn is called as
|
||||
* fn(value, key, cache). Does not update age or recenty of use.
|
||||
*/
|
||||
public forEach<T = this>(
|
||||
callbackFn: (this: T, value: V, key: K, cache: this) => void,
|
||||
thisArg?: T
|
||||
): void
|
||||
|
||||
/**
|
||||
* The same as {@link forEach} but items are iterated over in reverse
|
||||
* order. (ie, less recently used items are iterated over first.)
|
||||
*/
|
||||
public rforEach<T = this>(
|
||||
callbackFn: (this: T, value: V, key: K, cache: this) => void,
|
||||
thisArg?: T
|
||||
): void
|
||||
|
||||
/**
|
||||
* Return a generator yielding the keys in the cache,
|
||||
* in order from most recently used to least recently used.
|
||||
*/
|
||||
public keys(): Generator<K, void, void>
|
||||
|
||||
/**
|
||||
* Inverse order version of {@link keys}
|
||||
*
|
||||
* Return a generator yielding the keys in the cache,
|
||||
* in order from least recently used to most recently used.
|
||||
*/
|
||||
public rkeys(): Generator<K, void, void>
|
||||
|
||||
/**
|
||||
* Return a generator yielding the values in the cache,
|
||||
* in order from most recently used to least recently used.
|
||||
*/
|
||||
public values(): Generator<V, void, void>
|
||||
|
||||
/**
|
||||
* Inverse order version of {@link values}
|
||||
*
|
||||
* Return a generator yielding the values in the cache,
|
||||
* in order from least recently used to most recently used.
|
||||
*/
|
||||
public rvalues(): Generator<V, void, void>
|
||||
|
||||
/**
|
||||
* Return a generator yielding `[key, value]` pairs,
|
||||
* in order from most recently used to least recently used.
|
||||
*/
|
||||
public entries(): Generator<[K, V], void, void>
|
||||
|
||||
/**
|
||||
* Inverse order version of {@link entries}
|
||||
*
|
||||
* Return a generator yielding `[key, value]` pairs,
|
||||
* in order from least recently used to most recently used.
|
||||
*/
|
||||
public rentries(): Generator<[K, V], void, void>
|
||||
|
||||
/**
|
||||
* Iterating over the cache itself yields the same results as
|
||||
* {@link entries}
|
||||
*/
|
||||
public [Symbol.iterator](): Generator<[K, V], void, void>
|
||||
|
||||
/**
|
||||
* Return an array of [key, entry] objects which can be passed to
|
||||
* cache.load()
|
||||
*/
|
||||
public dump(): Array<[K, LRUCache.Entry<V>]>
|
||||
|
||||
/**
|
||||
* Reset the cache and load in the items in entries in the order listed.
|
||||
* Note that the shape of the resulting cache may be different if the
|
||||
* same options are not used in both caches.
|
||||
*/
|
||||
public load(
|
||||
cacheEntries: ReadonlyArray<[K, LRUCache.Entry<V>]>
|
||||
): void
|
||||
|
||||
/**
|
||||
* Evict the least recently used item, returning its value or `undefined`
|
||||
* if cache is empty.
|
||||
*/
|
||||
public pop(): V | undefined
|
||||
|
||||
/**
|
||||
* Deletes a key out of the cache.
|
||||
*
|
||||
* @deprecated since 7.0 use delete() instead
|
||||
*/
|
||||
public del(key: K): boolean
|
||||
|
||||
/**
|
||||
* Clear the cache entirely, throwing away all values.
|
||||
*
|
||||
* @deprecated since 7.0 use clear() instead
|
||||
*/
|
||||
public reset(): void
|
||||
|
||||
/**
|
||||
* Manually iterates over the entire cache proactively pruning old entries.
|
||||
*
|
||||
* @deprecated since 7.0 use purgeStale() instead
|
||||
*/
|
||||
public prune(): boolean
|
||||
|
||||
/**
|
||||
* Make an asynchronous cached fetch using the {@link fetchMethod} function.
|
||||
*
|
||||
* If multiple fetches for the same key are issued, then they will all be
|
||||
* coalesced into a single call to fetchMethod.
|
||||
*
|
||||
* Note that this means that handling options such as
|
||||
* {@link allowStaleOnFetchAbort}, {@link signal}, and
|
||||
* {@link allowStaleOnFetchRejection} will be determined by the FIRST fetch()
|
||||
* call for a given key.
|
||||
*
|
||||
* This is a known (fixable) shortcoming which will be addresed on when
|
||||
* someone complains about it, as the fix would involve added complexity and
|
||||
* may not be worth the costs for this edge case.
|
||||
*
|
||||
* since: 7.6.0
|
||||
*/
|
||||
public fetch(
|
||||
key: K,
|
||||
options?: LRUCache.FetchOptions<K, V>
|
||||
): Promise<V>
|
||||
|
||||
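  // Illustrative note, not part of the vendored file: given the declarations
  // above, concurrent fetch() calls for one key coalesce into a single call
  // to the configured fetchMethod. The URL and value type below are assumptions
  // made up for the sake of the sketch.
  //
  //   const cache = new LRUCache<string, string>({
  //     max: 100,
  //     fetchMethod: async (key, staleValue, { signal }) => {
  //       const res = await fetch(`https://example.test/${key}`, { signal })
  //       return res.text()
  //     },
  //   })
  //   // one fetchMethod invocation serves both callers:
  //   const [a, b] = await Promise.all([cache.fetch('x'), cache.fetch('x')])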
  /**
   * since: 7.6.0
   */
  public getRemainingTTL(key: K): LRUMilliseconds
}

declare namespace LRUCache {
  type DisposeReason = 'evict' | 'set' | 'delete'

  type SizeCalculator<K, V> = (value: V, key: K) => LRUSize
  type Disposer<K, V> = (
    value: V,
    key: K,
    reason: DisposeReason
  ) => void
  type Fetcher<K, V> = (
    key: K,
    staleValue: V | undefined,
    options: FetcherOptions<K, V>
  ) => Promise<V | void | undefined> | V | void | undefined

  interface DeprecatedOptions<K, V> {
    /**
     * alias for ttl
     *
     * @deprecated since 7.0 use options.ttl instead
     */
    maxAge?: LRUMilliseconds

    /**
     * alias for {@link sizeCalculation}
     *
     * @deprecated since 7.0 use {@link sizeCalculation} instead
     */
    length?: SizeCalculator<K, V>

    /**
     * alias for allowStale
     *
     * @deprecated since 7.0 use options.allowStale instead
     */
    stale?: boolean
  }

  interface LimitedByCount {
    /**
     * The number of most recently used items to keep.
     * Note that we may store fewer items than this if maxSize is hit.
     */
    max: LRUCount
  }

  type MaybeMaxEntrySizeLimit<K, V> =
    | {
        /**
         * The maximum allowed size for any single item in the cache.
         *
         * If a larger item is passed to {@link set} or returned by a
         * {@link fetchMethod}, then it will not be stored in the cache.
         */
        maxEntrySize: LRUSize
        sizeCalculation?: SizeCalculator<K, V>
      }
    | {}

  interface LimitedBySize<K, V> {
    /**
     * If you wish to track item size, you must provide a maxSize
     * note that we still will only keep up to max *actual items*,
     * if max is set, so size tracking may cause fewer than max items
     * to be stored. At the extreme, a single item of maxSize size
     * will cause everything else in the cache to be dropped when it
     * is added. Use with caution!
     *
     * Note also that size tracking can negatively impact performance,
     * though for most cases, only minimally.
     */
    maxSize: LRUSize

    /**
     * Function to calculate size of items. Useful if storing strings or
     * buffers or other items where memory size depends on the object itself.
     *
     * Items larger than {@link maxEntrySize} will not be stored in the cache.
     *
     * Note that when {@link maxSize} or {@link maxEntrySize} are set, every
     * item added MUST have a size specified, either via a `sizeCalculation` in
     * the constructor, or `sizeCalculation` or {@link size} options to
     * {@link set}.
     */
    sizeCalculation?: SizeCalculator<K, V>
  }

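  // Illustrative note, not part of the vendored file: a size-limited cache
  // per LimitedBySize above could be constructed as
  //
  //   new LRUCache({
  //     max: 500,
  //     maxSize: 5000,
  //     sizeCalculation: (value, key) => value.length,
  //   })
  //
  // where eviction also kicks in once the summed entry sizes exceed maxSize.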
  interface LimitedByTTL {
    /**
     * Max time in milliseconds for items to live in cache before they are
     * considered stale. Note that stale items are NOT preemptively removed
     * by default, and MAY live in the cache, contributing to its LRU max,
     * long after they have expired.
     *
     * Also, as this cache is optimized for LRU/MRU operations, some of
     * the staleness/TTL checks will reduce performance, as they will incur
     * overhead by deleting items.
     *
     * Must be an integer number of ms, defaults to 0, which means "no TTL"
     */
    ttl: LRUMilliseconds

    /**
     * Boolean flag to tell the cache to not update the TTL when
     * setting a new value for an existing key (ie, when updating a value
     * rather than inserting a new value). Note that the TTL value is
     * _always_ set (if provided) when adding a new entry into the cache.
     *
     * @default false
     * @since 7.4.0
     */
    noUpdateTTL?: boolean

    /**
     * Minimum amount of time in ms in which to check for staleness.
     * Defaults to 1, which means that the current time is checked
     * at most once per millisecond.
     *
     * Set to 0 to check the current time every time staleness is tested.
     * (This reduces performance, and is theoretically unnecessary.)
     *
     * Setting this to a higher value will improve performance somewhat
     * while using ttl tracking, albeit at the expense of keeping stale
     * items around a bit longer than their TTLs would indicate.
     *
     * @default 1
     * @since 7.1.0
     */
    ttlResolution?: LRUMilliseconds

    /**
     * Preemptively remove stale items from the cache.
     * Note that this may significantly degrade performance,
     * especially if the cache is storing a large number of items.
     * It is almost always best to just leave the stale items in
     * the cache, and let them fall out as new items are added.
     *
     * Note that this means that {@link allowStale} is a bit pointless,
     * as stale items will be deleted almost as soon as they expire.
     *
     * Use with caution!
     *
     * @default false
     * @since 7.1.0
     */
    ttlAutopurge?: boolean

    /**
     * Return stale items from {@link get} before disposing of them.
     * Return stale values from {@link fetch} while performing a call
     * to the {@link fetchMethod} in the background.
     *
     * @default false
     */
    allowStale?: boolean

    /**
     * Update the age of items on {@link get}, renewing their TTL
     *
     * @default false
     */
    updateAgeOnGet?: boolean

    /**
     * Do not delete stale items when they are retrieved with {@link get}.
     * Note that the {@link get} return value will still be `undefined` unless
     * allowStale is true.
     *
     * @default false
     * @since 7.11.0
     */
    noDeleteOnStaleGet?: boolean

    /**
     * Update the age of items on {@link has}, renewing their TTL
     *
     * @default false
     */
    updateAgeOnHas?: boolean
  }

  type SafetyBounds<K, V> =
    | LimitedByCount
    | LimitedBySize<K, V>
    | LimitedByTTL

  // options shared by all three of the limiting scenarios
  interface SharedOptions<K, V> {
    /**
     * Function that is called on items when they are dropped from the cache.
     * This can be handy if you want to close file descriptors or do other
     * cleanup tasks when items are no longer accessible. Called with `key,
     * value`. It's called before actually removing the item from the
     * internal cache, so it is *NOT* safe to re-add them.
     * Use {@link disposeAfter} if you wish to dispose items after they have
     * been fully removed, when it is safe to add them back to the cache.
     */
    dispose?: Disposer<K, V>

    /**
     * The same as dispose, but called *after* the entry is completely
     * removed and the cache is once again in a clean state. It is safe to
     * add an item right back into the cache at this point.
     * However, note that it is *very* easy to inadvertently create infinite
     * recursion this way.
     *
     * @since 7.3.0
     */
    disposeAfter?: Disposer<K, V>

    /**
     * Set to true to suppress calling the dispose() function if the entry
     * key is still accessible within the cache.
     * This may be overridden by passing an options object to {@link set}.
     *
     * @default false
     */
    noDisposeOnSet?: boolean

    /**
     * Function that is used to make background asynchronous fetches. Called
     * with `fetchMethod(key, staleValue, { signal, options, context })`.
     *
     * If `fetchMethod` is not provided, then {@link fetch} is
     * equivalent to `Promise.resolve(cache.get(key))`.
     *
     * The `fetchMethod` should ONLY return `undefined` in cases where the
     * abort controller has sent an abort signal.
     *
     * @since 7.6.0
     */
    fetchMethod?: LRUCache.Fetcher<K, V>

    /**
     * Set to true to suppress the deletion of stale data when a
     * {@link fetchMethod} throws an error or returns a rejected promise
     *
     * This may be overridden in the {@link fetchMethod}.
     *
     * @default false
     * @since 7.10.0
     */
    noDeleteOnFetchRejection?: boolean

    /**
     * Set to true to allow returning stale data when a {@link fetchMethod}
     * throws an error or returns a rejected promise. Note that this
     * differs from using {@link allowStale} in that stale data will
     * ONLY be returned in the case that the fetch fails, not any other
     * times.
     *
     * This may be overridden in the {@link fetchMethod}.
     *
     * @default false
     * @since 7.16.0
     */
    allowStaleOnFetchRejection?: boolean

    /**
     *
     * Set to true to ignore the `abort` event emitted by the `AbortSignal`
     * object passed to {@link fetchMethod}, and still cache the
     * resulting resolution value, as long as it is not `undefined`.
     *
     * When used on its own, this means aborted {@link fetch} calls are not
     * immediately resolved or rejected when they are aborted, and instead take
     * the full time to await.
     *
     * When used with {@link allowStaleOnFetchAbort}, aborted {@link fetch}
     * calls will resolve immediately to their stale cached value or
     * `undefined`, and will continue to process and eventually update the
     * cache when they resolve, as long as the resulting value is not
     * `undefined`, thus supporting a "return stale on timeout while
     * refreshing" mechanism by passing `AbortSignal.timeout(n)` as the signal.
     *
     * **Note**: regardless of this setting, an `abort` event _is still emitted
     * on the `AbortSignal` object_, so may result in invalid results when
     * passed to other underlying APIs that use AbortSignals.
     *
     * This may be overridden in the {@link fetchMethod} or the call to
     * {@link fetch}.
     *
     * @default false
     * @since 7.17.0
     */
    ignoreFetchAbort?: boolean

    /**
     * Set to true to return a stale value from the cache when the
     * `AbortSignal` passed to the {@link fetchMethod} dispatches an `'abort'`
     * event, whether user-triggered, or due to internal cache behavior.
     *
     * Unless {@link ignoreFetchAbort} is also set, the underlying
     * {@link fetchMethod} will still be considered canceled, and its return
     * value will be ignored and not cached.
     *
     * This may be overridden in the {@link fetchMethod} or the call to
     * {@link fetch}.
     *
     * @default false
     * @since 7.17.0
     */
    allowStaleOnFetchAbort?: boolean

    /**
     * Set to any value in the constructor or {@link fetch} options to
     * pass arbitrary data to the {@link fetchMethod} in the {@link context}
     * options field.
     *
     * @since 7.12.0
     */
    fetchContext?: any
  }

  type Options<K, V> = SharedOptions<K, V> &
    DeprecatedOptions<K, V> &
    SafetyBounds<K, V> &
    MaybeMaxEntrySizeLimit<K, V>

  /**
   * options which override the options set in the LRUCache constructor
   * when calling {@link set}.
   */
  interface SetOptions<K, V> {
    /**
     * A value for the size of the entry, prevents calls to
     * {@link sizeCalculation}.
     *
     * Items larger than {@link maxEntrySize} will not be stored in the cache.
     *
     * Note that when {@link maxSize} or {@link maxEntrySize} are set, every
     * item added MUST have a size specified, either via a `sizeCalculation` in
     * the constructor, or {@link sizeCalculation} or `size` options to
     * {@link set}.
     */
    size?: LRUSize
    /**
     * Overrides the {@link sizeCalculation} method set in the constructor.
     *
     * Items larger than {@link maxEntrySize} will not be stored in the cache.
     *
     * Note that when {@link maxSize} or {@link maxEntrySize} are set, every
     * item added MUST have a size specified, either via a `sizeCalculation` in
     * the constructor, or `sizeCalculation` or {@link size} options to
     * {@link set}.
     */
    sizeCalculation?: SizeCalculator<K, V>
    ttl?: LRUMilliseconds
    start?: LRUMilliseconds
    noDisposeOnSet?: boolean
    noUpdateTTL?: boolean
    status?: Status<V>
  }

  /**
   * options which override the options set in the LRUCache constructor
   * when calling {@link has}.
   */
  interface HasOptions<V> {
    updateAgeOnHas?: boolean
    status: Status<V>
  }

  /**
   * options which override the options set in the LRUCache constructor
   * when calling {@link get}.
   */
  interface GetOptions<V> {
    allowStale?: boolean
    updateAgeOnGet?: boolean
    noDeleteOnStaleGet?: boolean
    status?: Status<V>
  }

  /**
   * options which override the options set in the LRUCache constructor
   * when calling {@link peek}.
   */
  interface PeekOptions {
    allowStale?: boolean
  }

  /**
   * Options object passed to the {@link fetchMethod}
   *
   * May be mutated by the {@link fetchMethod} to affect the behavior of the
   * resulting {@link set} operation on resolution, or in the case of
   * {@link noDeleteOnFetchRejection}, {@link ignoreFetchAbort}, and
   * {@link allowStaleOnFetchRejection}, the handling of failure.
   */
  interface FetcherFetchOptions<K, V> {
    allowStale?: boolean
    updateAgeOnGet?: boolean
    noDeleteOnStaleGet?: boolean
    size?: LRUSize
    sizeCalculation?: SizeCalculator<K, V>
    ttl?: LRUMilliseconds
    noDisposeOnSet?: boolean
    noUpdateTTL?: boolean
    noDeleteOnFetchRejection?: boolean
    allowStaleOnFetchRejection?: boolean
    ignoreFetchAbort?: boolean
    allowStaleOnFetchAbort?: boolean
    status?: Status<V>
  }

  /**
   * Status object that may be passed to {@link fetch}, {@link get},
   * {@link set}, and {@link has}.
   */
  interface Status<V> {
    /**
     * The status of a set() operation.
     *
     * - add: the item was not found in the cache, and was added
     * - update: the item was in the cache, with the same value provided
     * - replace: the item was in the cache, and replaced
     * - miss: the item was not added to the cache for some reason
     */
    set?: 'add' | 'update' | 'replace' | 'miss'

    /**
     * the ttl stored for the item, or undefined if ttls are not used.
     */
    ttl?: LRUMilliseconds

    /**
     * the start time for the item, or undefined if ttls are not used.
     */
    start?: LRUMilliseconds

    /**
     * The timestamp used for TTL calculation
     */
    now?: LRUMilliseconds

    /**
     * the remaining ttl for the item, or undefined if ttls are not used.
     */
    remainingTTL?: LRUMilliseconds

    /**
     * The calculated size for the item, if sizes are used.
     */
    size?: LRUSize

    /**
     * A flag indicating that the item was not stored, due to exceeding the
     * {@link maxEntrySize}
     */
    maxEntrySizeExceeded?: true

    /**
     * The old value, specified in the case of `set:'update'` or
     * `set:'replace'`
     */
    oldValue?: V

    /**
     * The results of a {@link has} operation
     *
     * - hit: the item was found in the cache
     * - stale: the item was found in the cache, but is stale
     * - miss: the item was not found in the cache
     */
    has?: 'hit' | 'stale' | 'miss'

    /**
     * The status of a {@link fetch} operation.
     * Note that this can change as the underlying fetch() moves through
     * various states.
     *
     * - inflight: there is another fetch() for this key which is in process
     * - get: there is no fetchMethod, so {@link get} was called.
     * - miss: the item is not in cache, and will be fetched.
     * - hit: the item is in the cache, and was resolved immediately.
     * - stale: the item is in the cache, but stale.
     * - refresh: the item is in the cache, and not stale, but
     *   {@link forceRefresh} was specified.
     */
    fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh'

    /**
     * The {@link fetchMethod} was called
     */
    fetchDispatched?: true

    /**
     * The cached value was updated after a successful call to fetchMethod
     */
    fetchUpdated?: true

    /**
     * The reason for a fetch() rejection. Either the error raised by the
     * {@link fetchMethod}, or the reason for an AbortSignal.
     */
    fetchError?: Error

    /**
     * The fetch received an abort signal
     */
    fetchAborted?: true

    /**
     * The abort signal received was ignored, and the fetch was allowed to
     * continue.
     */
    fetchAbortIgnored?: true

    /**
     * The fetchMethod promise resolved successfully
     */
    fetchResolved?: true

    /**
     * The fetchMethod promise was rejected
     */
    fetchRejected?: true

    /**
     * The status of a {@link get} operation.
     *
     * - fetching: The item is currently being fetched. If a previous value is
     *   present and allowed, that will be returned.
     * - stale: The item is in the cache, and is stale.
     * - hit: the item is in the cache
     * - miss: the item is not in the cache
     */
    get?: 'stale' | 'hit' | 'miss'

    /**
     * A fetch or get operation returned a stale value.
     */
    returnedStale?: true
  }

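  // Illustrative note, not part of the vendored file: a caller observes what
  // an operation did by passing a Status object and inspecting it afterwards:
  //
  //   const status = {}
  //   cache.get(key, { status })
  //   status.get           // 'hit', 'stale', or 'miss'
  //   status.returnedStale // true only when a stale value was handed back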
  /**
   * options which override the options set in the LRUCache constructor
   * when calling {@link fetch}.
   *
   * This is the union of GetOptions and SetOptions, plus
   * {@link noDeleteOnFetchRejection}, {@link allowStaleOnFetchRejection},
   * {@link forceRefresh}, and {@link fetchContext}
   */
  interface FetchOptions<K, V> extends FetcherFetchOptions<K, V> {
    forceRefresh?: boolean
    fetchContext?: any
    signal?: AbortSignal
    status?: Status<V>
  }

  interface FetcherOptions<K, V> {
    signal: AbortSignal
    options: FetcherFetchOptions<K, V>
    /**
     * Object provided in the {@link fetchContext} option
     */
    context: any
  }

  interface Entry<V> {
    value: V
    ttl?: LRUMilliseconds
    size?: LRUSize
    start?: LRUMilliseconds
  }
}

export = LRUCache
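Taken together, the declarations above describe the whole v7 API surface. As a quick orientation, a minimal usage sketch (illustrative only, not part of the vendored files; the keys and values are made up):

```javascript
const LRUCache = require('lru-cache')

const cache = new LRUCache({
  max: 500,           // hard cap on item count
  ttl: 1000 * 60 * 5, // entries become stale after five minutes
})

cache.set('user:1', { name: 'Ada' })
cache.get('user:1')             // => { name: 'Ada' }, refreshes recency
cache.peek('user:1')            // same value, without touching recency
cache.getRemainingTTL('user:1') // ms left before the entry goes stale
cache.delete('user:1')          // => true
```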
1227
desktop-operator/node_modules/cacache/node_modules/lru-cache/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
1227
desktop-operator/node_modules/cacache/node_modules/lru-cache/index.mjs
generated
vendored
Normal file
File diff suppressed because it is too large
96
desktop-operator/node_modules/cacache/node_modules/lru-cache/package.json
generated
vendored
Normal file
@@ -0,0 +1,96 @@
{
  "name": "lru-cache",
  "description": "A cache object that deletes the least-recently-used items.",
  "version": "7.18.3",
  "author": "Isaac Z. Schlueter <i@izs.me>",
  "keywords": [
    "mru",
    "lru",
    "cache"
  ],
  "sideEffects": false,
  "scripts": {
    "build": "npm run prepare",
    "pretest": "npm run prepare",
    "presnap": "npm run prepare",
    "prepare": "node ./scripts/transpile-to-esm.js",
    "size": "size-limit",
    "test": "tap",
    "snap": "tap",
    "preversion": "npm test",
    "postversion": "npm publish",
    "prepublishOnly": "git push origin --follow-tags",
    "format": "prettier --write .",
    "typedoc": "typedoc ./index.d.ts"
  },
  "type": "commonjs",
  "main": "./index.js",
  "module": "./index.mjs",
  "types": "./index.d.ts",
  "exports": {
    ".": {
      "import": {
        "types": "./index.d.ts",
        "default": "./index.mjs"
      },
      "require": {
        "types": "./index.d.ts",
        "default": "./index.js"
      }
    },
    "./package.json": "./package.json"
  },
  "repository": "git://github.com/isaacs/node-lru-cache.git",
  "devDependencies": {
    "@size-limit/preset-small-lib": "^7.0.8",
    "@types/node": "^17.0.31",
    "@types/tap": "^15.0.6",
    "benchmark": "^2.1.4",
    "c8": "^7.11.2",
    "clock-mock": "^1.0.6",
    "eslint-config-prettier": "^8.5.0",
    "prettier": "^2.6.2",
    "size-limit": "^7.0.8",
    "tap": "^16.3.4",
    "ts-node": "^10.7.0",
    "tslib": "^2.4.0",
    "typedoc": "^0.23.24",
    "typescript": "^4.6.4"
  },
  "license": "ISC",
  "files": [
    "index.js",
    "index.mjs",
    "index.d.ts"
  ],
  "engines": {
    "node": ">=12"
  },
  "prettier": {
    "semi": false,
    "printWidth": 70,
    "tabWidth": 2,
    "useTabs": false,
    "singleQuote": true,
    "jsxSingleQuote": false,
    "bracketSameLine": true,
    "arrowParens": "avoid",
    "endOfLine": "lf"
  },
  "tap": {
    "nyc-arg": [
      "--include=index.js"
    ],
    "node-arg": [
      "--expose-gc",
      "--require",
      "ts-node/register"
    ],
    "ts": false
  },
  "size-limit": [
    {
      "path": "./index.js"
    }
  ]
}
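The `exports` map above routes each module system to its own build while sharing one set of typings; a sketch of what consumers end up loading (nothing here beyond what the map declares):

```javascript
// CommonJS: the "require" condition resolves to ./index.js
const LRUCache = require('lru-cache')

// ESM: the "import" condition resolves to ./index.mjs
// import LRUCache from 'lru-cache'

// Both paths pick up types from ./index.d.ts via the "types" condition.
```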
15
desktop-operator/node_modules/cacache/node_modules/minimatch/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,15 @@
The ISC License

Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
259
desktop-operator/node_modules/cacache/node_modules/minimatch/README.md
generated
vendored
Normal file
@@ -0,0 +1,259 @@
# minimatch

A minimal matching utility.

[Build Status](http://travis-ci.org/isaacs/minimatch)

This is the matching library used internally by npm.

It works by converting glob expressions into JavaScript `RegExp`
objects.

## Usage

```javascript
var minimatch = require("minimatch")

minimatch("bar.foo", "*.foo") // true!
minimatch("bar.foo", "*.bar") // false!
minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy!
```

## Features

Supports these glob features:

* Brace Expansion
* Extended glob matching
* "Globstar" `**` matching

See:

* `man sh`
* `man bash`
* `man 3 fnmatch`
* `man 5 gitignore`

## Windows

**Please only use forward-slashes in glob expressions.**

Though windows uses either `/` or `\` as its path separator, only `/`
characters are used by this glob implementation. You must use
forward-slashes **only** in glob expressions. Back-slashes in patterns
will always be interpreted as escape characters, not path separators.

Note that `\` or `/` _will_ be interpreted as path separators in paths on
Windows, and will match against `/` in glob expressions.

So just always use `/` in patterns.

## Minimatch Class

Create a minimatch object by instantiating the `minimatch.Minimatch` class.

```javascript
var Minimatch = require("minimatch").Minimatch
var mm = new Minimatch(pattern, options)
```

### Properties

* `pattern` The original pattern the minimatch object represents.
* `options` The options supplied to the constructor.
* `set` A 2-dimensional array of regexp or string expressions.
  Each row in the
  array corresponds to a brace-expanded pattern. Each item in the row
  corresponds to a single path-part. For example, the pattern
  `{a,b/c}/d` would expand to a set of patterns like:

        [ [ a, d ]
        , [ b, c, d ] ]

  If a portion of the pattern doesn't have any "magic" in it
  (that is, it's something like `"foo"` rather than `fo*o?`), then it
  will be left as a string rather than converted to a regular
  expression.

* `regexp` Created by the `makeRe` method. A single regular expression
  expressing the entire pattern. This is useful in cases where you wish
  to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled.
* `negate` True if the pattern is negated.
* `comment` True if the pattern is a comment.
* `empty` True if the pattern is `""`.

### Methods

* `makeRe` Generate the `regexp` member if necessary, and return it.
  Will return `false` if the pattern is invalid.
* `match(fname)` Return true if the filename matches the pattern, or
  false otherwise.
* `matchOne(fileArray, patternArray, partial)` Take a `/`-split
  filename, and match it against a single row in the `regExpSet`. This
  method is mainly for internal use, but is exposed so that it can be
  used by a glob-walker that needs to avoid excessive filesystem calls.

All other methods are internal, and will be called as necessary.

### minimatch(path, pattern, options)

Main export. Tests a path against the pattern using the options.

```javascript
var isJS = minimatch(file, "*.js", { matchBase: true })
```

### minimatch.filter(pattern, options)

Returns a function that tests its
supplied argument, suitable for use with `Array.filter`. Example:

```javascript
var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true}))
```

### minimatch.match(list, pattern, options)

Match against the list of
files, in the style of fnmatch or glob. If nothing is matched, and
options.nonull is set, then return a list containing the pattern itself.

```javascript
var javascripts = minimatch.match(fileList, "*.js", {matchBase: true})
```

### minimatch.makeRe(pattern, options)

Make a regular expression object from the pattern.

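For instance (a small sketch; the pattern and inputs are made up):

```javascript
var re = minimatch.makeRe("*.js")
// re is a RegExp, or false if the pattern was invalid
re.test("x.js")       // true
re.test(".hidden.js") // false; dotfiles require the `dot` option
```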
## Options

All options are `false` by default.

### debug

Dump a ton of stuff to stderr.

### nobrace

Do not expand `{a,b}` and `{1..3}` brace sets.

### noglobstar

Disable `**` matching against multiple folder names.

### dot

Allow patterns to match filenames starting with a period, even if
the pattern does not explicitly have a period in that spot.

Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot`
is set.

### noext

Disable "extglob" style patterns like `+(a|b)`.

### nocase

Perform a case-insensitive match.

### nonull

When a match is not found by `minimatch.match`, return a list containing
the pattern itself if this option is set. When not set, an empty list
is returned if there are no matches.

### matchBase

If set, then patterns without slashes will be matched
against the basename of the path if it contains slashes. For example,
`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.

### nocomment

Suppress the behavior of treating `#` at the start of a pattern as a
comment.

### nonegate

Suppress the behavior of treating a leading `!` character as negation.

### flipNegate

Returns from negate expressions the same as if they were not negated.
(Ie, true on a hit, false on a miss.)

### partial

Compare a partial path to a pattern. As long as the parts of the path that
are present are not contradicted by the pattern, it will be treated as a
match. This is useful in applications where you're walking through a
folder structure, and don't yet have the full path, but want to ensure that
you do not walk down paths that can never be a match.

For example,

```js
minimatch('/a/b', '/a/*/c/d', { partial: true })  // true, might be /a/b/c/d
minimatch('/a/b', '/**/d', { partial: true })     // true, might be /a/b/.../d
minimatch('/x/y/z', '/a/**/z', { partial: true }) // false, because x !== a
```

### windowsPathsNoEscape

Use `\\` as a path separator _only_, and _never_ as an escape
character. If set, all `\\` characters are replaced with `/` in
the pattern. Note that this makes it **impossible** to match
against paths containing literal glob pattern characters, but
allows matching with patterns constructed using `path.join()` and
`path.resolve()` on Windows platforms, mimicking the (buggy!)
behavior of earlier versions on Windows. Please use with
caution, and be mindful of [the caveat about Windows
paths](#windows).

For legacy reasons, this is also set if
`options.allowWindowsEscape` is set to the exact value `false`.

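A sketch of the intended use on Windows (hypothetical paths; `path.join` output shown for win32):

```js
var path = require("path")

// path.join emits back-slashes on win32, which would otherwise be
// read as escape characters inside the pattern:
var pattern = path.join("a", "b", "*.js") // 'a\\b\\*.js' on win32

minimatch("a/b/c.js", pattern, { windowsPathsNoEscape: true }) // true
```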
## Comparisons to other fnmatch/glob implementations

While strict compliance with the existing standards is a worthwhile
goal, some discrepancies exist between minimatch and other
implementations, and are intentional.

If the pattern starts with a `!` character, then it is negated. Set the
`nonegate` flag to suppress this behavior, and treat leading `!`
characters normally. This is perhaps relevant if you wish to start the
pattern with a negative extglob pattern like `!(a|B)`. Multiple `!`
characters at the start of a pattern will negate the pattern multiple
times.

If a pattern starts with `#`, then it is treated as a comment, and
will not match anything. Use `\#` to match a literal `#` at the
start of a line, or set the `nocomment` flag to suppress this behavior.

The double-star character `**` is supported by default, unless the
`noglobstar` flag is set. This is supported in the manner of bsdglob
and bash 4.1, where `**` only has special significance if it is the only
thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
`a/**b` will not.

If an escaped pattern has no matches, and the `nonull` flag is set,
then minimatch.match returns the pattern as-provided, rather than
interpreting the character escapes. For example,
`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
`"*a?"`. This is akin to setting the `nullglob` option in bash, except
that it does not resolve escaped pattern characters.

If brace expansion is not disabled, then it is performed before any
other interpretation of the glob pattern. Thus, a pattern like
`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
checked for validity. Since those two are valid, matching proceeds.

Note that `fnmatch(3)` in libc is an extremely naive string comparison
matcher, which does not do anything special for slashes. This library is
designed to be used in glob searching and file walkers, and so it does do
special things with `/`. Thus, `foo*` will not match `foo/bar` in this
library, even though it would in `fnmatch(3)`.
4
desktop-operator/node_modules/cacache/node_modules/minimatch/lib/path.js
generated
vendored
Normal file
@@ -0,0 +1,4 @@
const isWindows = typeof process === 'object' &&
  process &&
  process.platform === 'win32'
module.exports = isWindows ? { sep: '\\' } : { sep: '/' }
944
desktop-operator/node_modules/cacache/node_modules/minimatch/minimatch.js
generated
vendored
Normal file
@@ -0,0 +1,944 @@
const minimatch = module.exports = (p, pattern, options = {}) => {
  assertValidPattern(pattern)

  // shortcut: comments match nothing.
  if (!options.nocomment && pattern.charAt(0) === '#') {
    return false
  }

  return new Minimatch(pattern, options).match(p)
}

module.exports = minimatch

const path = require('./lib/path.js')
minimatch.sep = path.sep

const GLOBSTAR = Symbol('globstar **')
minimatch.GLOBSTAR = GLOBSTAR
const expand = require('brace-expansion')

const plTypes = {
  '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
  '?': { open: '(?:', close: ')?' },
  '+': { open: '(?:', close: ')+' },
  '*': { open: '(?:', close: ')*' },
  '@': { open: '(?:', close: ')' }
}

// any single thing other than /
// don't need to escape / when using new RegExp()
const qmark = '[^/]'

// * => any number of characters
const star = qmark + '*?'

// ** when dots are allowed. Anything goes, except .. and .
// not (^ or / followed by one or two dots followed by $ or /),
// followed by anything, any number of times.
const twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'

// not a ^ or / followed by a dot,
// followed by anything, any number of times.
const twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'

// "abc" -> { a:true, b:true, c:true }
const charSet = s => s.split('').reduce((set, c) => {
  set[c] = true
  return set
}, {})

// characters that need to be escaped in RegExp.
const reSpecials = charSet('().*{}+?[]^$\\!')

// characters that indicate we have to add the pattern start
const addPatternStartSet = charSet('[.(')

// normalizes slashes.
const slashSplit = /\/+/

minimatch.filter = (pattern, options = {}) =>
  (p, i, list) => minimatch(p, pattern, options)

const ext = (a, b = {}) => {
  const t = {}
  Object.keys(a).forEach(k => t[k] = a[k])
  Object.keys(b).forEach(k => t[k] = b[k])
  return t
}

minimatch.defaults = def => {
  if (!def || typeof def !== 'object' || !Object.keys(def).length) {
    return minimatch
  }

  const orig = minimatch

  const m = (p, pattern, options) => orig(p, pattern, ext(def, options))
  m.Minimatch = class Minimatch extends orig.Minimatch {
    constructor (pattern, options) {
      super(pattern, ext(def, options))
    }
  }
  m.Minimatch.defaults = options => orig.defaults(ext(def, options)).Minimatch
  m.filter = (pattern, options) => orig.filter(pattern, ext(def, options))
  m.defaults = options => orig.defaults(ext(def, options))
  m.makeRe = (pattern, options) => orig.makeRe(pattern, ext(def, options))
  m.braceExpand = (pattern, options) => orig.braceExpand(pattern, ext(def, options))
  m.match = (list, pattern, options) => orig.match(list, pattern, ext(def, options))

  return m
}

// Brace expansion:
// a{b,c}d -> abd acd
// a{b,}c -> abc ac
// a{0..3}d -> a0d a1d a2d a3d
// a{b,c{d,e}f}g -> abg acdfg acefg
// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
//
// Invalid sets are not expanded.
// a{2..}b -> a{2..}b
// a{b}c -> a{b}c
minimatch.braceExpand = (pattern, options) => braceExpand(pattern, options)

const braceExpand = (pattern, options = {}) => {
  assertValidPattern(pattern)

  // Thanks to Yeting Li <https://github.com/yetingli> for
  // improving this regexp to avoid a ReDOS vulnerability.
  if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
    // shortcut. no need to expand.
    return [pattern]
  }

  return expand(pattern)
}

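// Illustrative note, not part of the vendored source: per the expansion table
// above,
//   minimatch.braceExpand('a{b,c}d') // => ['abd', 'acd']
// while invalid sets such as 'a{2..}b' pass through unchanged as ['a{2..}b'].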
const MAX_PATTERN_LENGTH = 1024 * 64
const assertValidPattern = pattern => {
  if (typeof pattern !== 'string') {
    throw new TypeError('invalid pattern')
  }

  if (pattern.length > MAX_PATTERN_LENGTH) {
    throw new TypeError('pattern is too long')
  }
}

// parse a component of the expanded set.
// At this point, no pattern may contain "/" in it
// so we're going to return a 2d array, where each entry is the full
// pattern, split on '/', and then turned into a regular expression.
// A regexp is made at the end which joins each array with an
// escaped /, and another full one which joins each regexp with |.
//
// Following the lead of Bash 4.1, note that "**" only has special meaning
// when it is the *only* thing in a path portion. Otherwise, any series
// of * is equivalent to a single *. Globstar behavior is enabled by
// default, and can be disabled by setting options.noglobstar.
const SUBPARSE = Symbol('subparse')

minimatch.makeRe = (pattern, options) =>
  new Minimatch(pattern, options || {}).makeRe()

minimatch.match = (list, pattern, options = {}) => {
  const mm = new Minimatch(pattern, options)
  list = list.filter(f => mm.match(f))
  if (mm.options.nonull && !list.length) {
    list.push(pattern)
  }
  return list
}

// replace stuff like \* with *
const globUnescape = s => s.replace(/\\(.)/g, '$1')
const charUnescape = s => s.replace(/\\([^-\]])/g, '$1')
const regExpEscape = s => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
const braExpEscape = s => s.replace(/[[\]\\]/g, '\\$&')

class Minimatch {
  constructor (pattern, options) {
    assertValidPattern(pattern)

    if (!options) options = {}

    this.options = options
    this.set = []
    this.pattern = pattern
    this.windowsPathsNoEscape = !!options.windowsPathsNoEscape ||
      options.allowWindowsEscape === false
    if (this.windowsPathsNoEscape) {
      this.pattern = this.pattern.replace(/\\/g, '/')
    }
    this.regexp = null
    this.negate = false
    this.comment = false
    this.empty = false
    this.partial = !!options.partial

    // make the set of regexps etc.
    this.make()
  }

  debug () {}

  make () {
    const pattern = this.pattern
    const options = this.options

    // empty patterns and comments match nothing.
    if (!options.nocomment && pattern.charAt(0) === '#') {
      this.comment = true
      return
    }
    if (!pattern) {
      this.empty = true
      return
    }

    // step 1: figure out negation, etc.
    this.parseNegate()

    // step 2: expand braces
    let set = this.globSet = this.braceExpand()

    if (options.debug) this.debug = (...args) => console.error(...args)

    this.debug(this.pattern, set)

    // step 3: now we have a set, so turn each one into a series of path-portion
    // matching patterns.
    // These will be regexps, except in the case of "**", which is
    // set to the GLOBSTAR object for globstar behavior,
    // and will not contain any / characters
    set = this.globParts = set.map(s => s.split(slashSplit))

    this.debug(this.pattern, set)

    // glob --> regexps
    set = set.map((s, si, set) => s.map(this.parse, this))

    this.debug(this.pattern, set)

    // filter out everything that didn't compile properly.
    set = set.filter(s => s.indexOf(false) === -1)

    this.debug(this.pattern, set)

    this.set = set
  }

  parseNegate () {
    if (this.options.nonegate) return

    const pattern = this.pattern
    let negate = false
    let negateOffset = 0

    for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
      negate = !negate
      negateOffset++
    }

    if (negateOffset) this.pattern = pattern.slice(negateOffset)
    this.negate = negate
  }

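  // Illustrative note, not part of the vendored source: parseNegate toggles
  // this.negate once per leading '!', so
  //   new Minimatch('!*.js').negate   // true
  //   new Minimatch('!!*.js').negate  // false (double negation)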
|
||||
// set partial to true to test if, for example,
|
||||
// "/a/b" matches the start of "/*/b/*/d"
|
||||
// Partial means, if you run out of file before you run
|
||||
// out of pattern, then that's fine, as long as all
|
||||
// the parts match.
|
||||
matchOne (file, pattern, partial) {
|
||||
var options = this.options
|
||||
|
||||
this.debug('matchOne',
|
||||
{ 'this': this, file: file, pattern: pattern })
|
||||
|
||||
this.debug('matchOne', file.length, pattern.length)
|
||||
|
||||
for (var fi = 0,
|
||||
pi = 0,
|
||||
fl = file.length,
|
||||
pl = pattern.length
|
||||
; (fi < fl) && (pi < pl)
|
||||
; fi++, pi++) {
|
||||
this.debug('matchOne loop')
|
||||
var p = pattern[pi]
|
||||
var f = file[fi]
|
||||
|
||||
this.debug(pattern, p, f)
|
||||
|
||||
// should be impossible.
|
||||
// some invalid regexp stuff in the set.
|
||||
/* istanbul ignore if */
|
||||
if (p === false) return false
|
||||
|
||||
if (p === GLOBSTAR) {
|
||||
this.debug('GLOBSTAR', [pattern, p, f])
|
||||
|
||||
// "**"
|
||||
// a/**/b/**/c would match the following:
|
||||
// a/b/x/y/z/c
|
||||
// a/x/y/z/b/c
|
||||
// a/b/x/b/x/c
|
||||
// a/b/c
|
||||
// To do this, take the rest of the pattern after
|
||||
// the **, and see if it would match the file remainder.
|
||||
// If so, return success.
|
||||
// If not, the ** "swallows" a segment, and try again.
|
||||
// This is recursively awful.
|
||||
//
|
||||
// a/**/b/**/c matching a/b/x/y/z/c
|
||||
// - a matches a
|
||||
// - doublestar
|
||||
// - matchOne(b/x/y/z/c, b/**/c)
|
||||
// - b matches b
|
||||
// - doublestar
|
||||
// - matchOne(x/y/z/c, c) -> no
|
||||
// - matchOne(y/z/c, c) -> no
|
||||
// - matchOne(z/c, c) -> no
|
||||
        // - matchOne(c, c) yes, hit
        var fr = fi
        var pr = pi + 1
        if (pr === pl) {
          this.debug('** at the end')
          // a ** at the end will just swallow the rest.
          // We have found a match.
          // however, it will not swallow /.x, unless
          // options.dot is set.
          // . and .. are *never* matched by **, for explosively
          // exponential reasons.
          for (; fi < fl; fi++) {
            if (file[fi] === '.' || file[fi] === '..' ||
              (!options.dot && file[fi].charAt(0) === '.')) return false
          }
          return true
        }

        // ok, let's see if we can swallow whatever we can.
        while (fr < fl) {
          var swallowee = file[fr]

          this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)

          // XXX remove this slice.  Just pass the start index.
          if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
            this.debug('globstar found match!', fr, fl, swallowee)
            // found a match.
            return true
          } else {
            // can't swallow "." or ".." ever.
            // can only swallow ".foo" when explicitly asked.
            if (swallowee === '.' || swallowee === '..' ||
              (!options.dot && swallowee.charAt(0) === '.')) {
              this.debug('dot detected!', file, fr, pattern, pr)
              break
            }

            // ** swallows a segment, and continue.
            this.debug('globstar swallow a segment, and continue')
            fr++
          }
        }

        // no match was found.
        // However, in partial mode, we can't say this is necessarily over.
        // If there's more *pattern* left, then
        /* istanbul ignore if */
        if (partial) {
          // ran out of file
          this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
          if (fr === fl) return true
        }
        return false
      }

      // something other than **
      // non-magic patterns just have to match exactly
      // patterns with magic have been turned into regexps.
      var hit
      if (typeof p === 'string') {
        hit = f === p
        this.debug('string match', p, f, hit)
      } else {
        hit = f.match(p)
        this.debug('pattern match', p, f, hit)
      }

      if (!hit) return false
    }

    // Note: ending in / means that we'll get a final ""
    // at the end of the pattern.  This can only match a
    // corresponding "" at the end of the file.
    // If the file ends in /, then it can only match a
    // a pattern that ends in /, unless the pattern just
    // doesn't have any more for it. But, a/b/ should *not*
    // match "a/b/*", even though "" matches against the
    // [^/]*? pattern, except in partial mode, where it might
    // simply not be reached yet.
    // However, a/b/ should still satisfy a/*

    // now either we fell off the end of the pattern, or we're done.
    if (fi === fl && pi === pl) {
      // ran out of pattern and filename at the same time.
      // an exact hit!
      return true
    } else if (fi === fl) {
      // ran out of file, but still had pattern left.
      // this is ok if we're doing the match as part of
      // a glob fs traversal.
      return partial
    } else /* istanbul ignore else */ if (pi === pl) {
      // ran out of pattern, still have file left.
      // this is only acceptable if we're on the very last
      // empty segment of a file with a trailing slash.
      // a/* should match a/b/
      return (fi === fl - 1) && (file[fi] === '')
    }

    // should be unreachable.
    /* istanbul ignore next */
    throw new Error('wtf?')
  }
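  // Editorial sketch, not part of the vendored file: matchOne() compares
  // pre-split path segments against one parsed pattern row, where each row
  // element is a literal string, a RegExp, or the GLOBSTAR sentinel for '**'.
  // Assuming `mm` is a Minimatch instance whose set contains such a row:
  //
  //   mm.matchOne(['a', 'b', 'c'], ['a', GLOBSTAR, 'c'], false)
  //   // => true: ** swallows the 'b' segment, then 'c' matches 'c'
  //   mm.matchOne(['a', '.x'], ['a', GLOBSTAR], false)
  //   // => false unless options.dot is set; ** never swallows dotfiles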

  braceExpand () {
    return braceExpand(this.pattern, this.options)
  }
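  // Editorial sketch: this delegates to the module-level braceExpand(),
  // which expands one braced pattern into the list of patterns it denotes,
  // e.g. minimatch.braceExpand('a{b,c}d') => ['abd', 'acd'].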

  parse (pattern, isSub) {
    assertValidPattern(pattern)

    const options = this.options

    // shortcuts
    if (pattern === '**') {
      if (!options.noglobstar)
        return GLOBSTAR
      else
        pattern = '*'
    }
    if (pattern === '') return ''

    let re = ''
    let hasMagic = false
    let escaping = false
    // ? => one single character
    const patternListStack = []
    const negativeLists = []
    let stateChar
    let inClass = false
    let reClassStart = -1
    let classStart = -1
    let cs
    let pl
    let sp
    // . and .. never match anything that doesn't start with .,
    // even when options.dot is set.  However, if the pattern
    // starts with ., then traversal patterns can match.
    let dotTravAllowed = pattern.charAt(0) === '.'
    let dotFileAllowed = options.dot || dotTravAllowed
    const patternStart = () =>
      dotTravAllowed
        ? ''
        : dotFileAllowed
          ? '(?!(?:^|\\/)\\.{1,2}(?:$|\\/))'
          : '(?!\\.)'
    const subPatternStart = (p) =>
      p.charAt(0) === '.'
        ? ''
        : options.dot
          ? '(?!(?:^|\\/)\\.{1,2}(?:$|\\/))'
          : '(?!\\.)'

    const clearStateChar = () => {
      if (stateChar) {
        // we had some state-tracking character
        // that wasn't consumed by this pass.
        switch (stateChar) {
          case '*':
            re += star
            hasMagic = true
            break
          case '?':
            re += qmark
            hasMagic = true
            break
          default:
            re += '\\' + stateChar
            break
        }
        this.debug('clearStateChar %j %j', stateChar, re)
        stateChar = false
      }
    }

    for (let i = 0, c; (i < pattern.length) && (c = pattern.charAt(i)); i++) {
      this.debug('%s\t%s %s %j', pattern, i, re, c)

      // skip over any that are escaped.
      if (escaping) {
        /* istanbul ignore next - completely not allowed, even escaped. */
        if (c === '/') {
          return false
        }

        if (reSpecials[c]) {
          re += '\\'
        }
        re += c
        escaping = false
        continue
      }

      switch (c) {
        /* istanbul ignore next */
        case '/': {
          // Should already be path-split by now.
          return false
        }

        case '\\':
          if (inClass && pattern.charAt(i + 1) === '-') {
            re += c
            continue
          }

          clearStateChar()
          escaping = true
          continue

        // the various stateChar values
        // for the "extglob" stuff.
        case '?':
        case '*':
        case '+':
        case '@':
        case '!':
          this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c)

          // all of those are literals inside a class, except that
          // the glob [!a] means [^a] in regexp
          if (inClass) {
            this.debug('  in class')
            if (c === '!' && i === classStart + 1) c = '^'
            re += c
            continue
          }

          // if we already have a stateChar, then it means
          // that there was something like ** or +? in there.
          // Handle the stateChar, then proceed with this one.
          this.debug('call clearStateChar %j', stateChar)
          clearStateChar()
          stateChar = c
          // if extglob is disabled, then +(asdf|foo) isn't a thing.
          // just clear the statechar *now*, rather than even diving into
          // the patternList stuff.
          if (options.noext) clearStateChar()
          continue

        case '(': {
          if (inClass) {
            re += '('
            continue
          }

          if (!stateChar) {
            re += '\\('
            continue
          }

          const plEntry = {
            type: stateChar,
            start: i - 1,
            reStart: re.length,
            open: plTypes[stateChar].open,
            close: plTypes[stateChar].close,
          }
          this.debug(this.pattern, '\t', plEntry)
          patternListStack.push(plEntry)
          // negation is (?:(?!(?:js)(?:<rest>))[^/]*)
          re += plEntry.open
          // next entry starts with a dot maybe?
          if (plEntry.start === 0 && plEntry.type !== '!') {
            dotTravAllowed = true
            re += subPatternStart(pattern.slice(i + 1))
          }
          this.debug('plType %j %j', stateChar, re)
          stateChar = false
          continue
        }

        case ')': {
          const plEntry = patternListStack[patternListStack.length - 1]
          if (inClass || !plEntry) {
            re += '\\)'
            continue
          }
          patternListStack.pop()

          // closing an extglob
          clearStateChar()
          hasMagic = true
          pl = plEntry
          // negation is (?:(?!js)[^/]*)
          // The others are (?:<pattern>)<type>
          re += pl.close
          if (pl.type === '!') {
            negativeLists.push(Object.assign(pl, { reEnd: re.length }))
          }
          continue
        }

        case '|': {
          const plEntry = patternListStack[patternListStack.length - 1]
          if (inClass || !plEntry) {
            re += '\\|'
            continue
          }

          clearStateChar()
          re += '|'
          // next subpattern can start with a dot?
          if (plEntry.start === 0 && plEntry.type !== '!') {
            dotTravAllowed = true
            re += subPatternStart(pattern.slice(i + 1))
          }
          continue
        }

        // these are mostly the same in regexp and glob
        case '[':
          // swallow any state-tracking char before the [
          clearStateChar()

          if (inClass) {
            re += '\\' + c
            continue
          }

          inClass = true
          classStart = i
          reClassStart = re.length
          re += c
          continue

        case ']':
          // a right bracket shall lose its special
          // meaning and represent itself in
          // a bracket expression if it occurs
          // first in the list.  -- POSIX.2 2.8.3.2
          if (i === classStart + 1 || !inClass) {
            re += '\\' + c
            continue
          }

          // split where the last [ was, make sure we don't have
          // an invalid re. if so, re-walk the contents of the
          // would-be class to re-translate any characters that
          // were passed through as-is
          // TODO: It would probably be faster to determine this
          // without a try/catch and a new RegExp, but it's tricky
          // to do safely.  For now, this is safe and works.
          cs = pattern.substring(classStart + 1, i)
          try {
            RegExp('[' + braExpEscape(charUnescape(cs)) + ']')
            // looks good, finish up the class.
            re += c
          } catch (er) {
            // out of order ranges in JS are errors, but in glob syntax,
            // they're just a range that matches nothing.
            re = re.substring(0, reClassStart) + '(?:$.)' // match nothing ever
          }
          hasMagic = true
          inClass = false
          continue

        default:
          // swallow any state char that wasn't consumed
          clearStateChar()

          if (reSpecials[c] && !(c === '^' && inClass)) {
            re += '\\'
          }

          re += c
          break

      } // switch
    } // for

    // handle the case where we left a class open.
    // "[abc" is valid, equivalent to "\[abc"
    if (inClass) {
      // split where the last [ was, and escape it
      // this is a huge pita.  We now have to re-walk
      // the contents of the would-be class to re-translate
      // any characters that were passed through as-is
      cs = pattern.slice(classStart + 1)
      sp = this.parse(cs, SUBPARSE)
      re = re.substring(0, reClassStart) + '\\[' + sp[0]
      hasMagic = hasMagic || sp[1]
    }

    // handle the case where we had a +( thing at the *end*
    // of the pattern.
    // each pattern list stack adds 3 chars, and we need to go through
    // and escape any | chars that were passed through as-is for the regexp.
    // Go through and escape them, taking care not to double-escape any
    // | chars that were already escaped.
    for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
      let tail
      tail = re.slice(pl.reStart + pl.open.length)
      this.debug('setting tail', re, pl)
      // maybe some even number of \, then maybe 1 \, followed by a |
      tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, (_, $1, $2) => {
        /* istanbul ignore else - should already be done */
        if (!$2) {
          // the | isn't already escaped, so escape it.
          $2 = '\\'
        }

        // need to escape all those slashes *again*, without escaping the
        // one that we need for escaping the | character.  As it works out,
        // escaping an even number of slashes can be done by simply repeating
        // it exactly after itself.  That's why this trick works.
        //
        // I am sorry that you have to see this.
        return $1 + $1 + $2 + '|'
      })

      this.debug('tail=%j\n   %s', tail, tail, pl, re)
      const t = pl.type === '*' ? star
        : pl.type === '?' ? qmark
        : '\\' + pl.type

      hasMagic = true
      re = re.slice(0, pl.reStart) + t + '\\(' + tail
    }

    // handle trailing things that only matter at the very end.
    clearStateChar()
    if (escaping) {
      // trailing \\
      re += '\\\\'
    }

    // only need to apply the nodot start if the re starts with
    // something that could conceivably capture a dot
    const addPatternStart = addPatternStartSet[re.charAt(0)]

    // Hack to work around lack of negative lookbehind in JS
    // A pattern like: *.!(x).!(y|z) needs to ensure that a name
    // like 'a.xyz.yz' doesn't match.  So, the first negative
    // lookahead, has to look ALL the way ahead, to the end of
    // the pattern.
    for (let n = negativeLists.length - 1; n > -1; n--) {
      const nl = negativeLists[n]

      const nlBefore = re.slice(0, nl.reStart)
      const nlFirst = re.slice(nl.reStart, nl.reEnd - 8)
      let nlAfter = re.slice(nl.reEnd)
      const nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + nlAfter

      // Handle nested stuff like *(*.js|!(*.json)), where open parens
      // mean that we should *not* include the ) in the bit that is considered
      // "after" the negated section.
      const closeParensBefore = nlBefore.split(')').length
      const openParensBefore = nlBefore.split('(').length - closeParensBefore
      let cleanAfter = nlAfter
      for (let i = 0; i < openParensBefore; i++) {
        cleanAfter = cleanAfter.replace(/\)[+*?]?/, '')
      }
      nlAfter = cleanAfter

      const dollar = nlAfter === '' && isSub !== SUBPARSE ? '(?:$|\\/)' : ''

      re = nlBefore + nlFirst + nlAfter + dollar + nlLast
    }

    // if the re is not "" at this point, then we need to make sure
    // it doesn't match against an empty path part.
    // Otherwise a/* will match a/, which it should not.
    if (re !== '' && hasMagic) {
      re = '(?=.)' + re
    }

    if (addPatternStart) {
      re = patternStart() + re
    }

    // parsing just a piece of a larger pattern.
    if (isSub === SUBPARSE) {
      return [re, hasMagic]
    }

    // if it's nocase, and the lcase/uppercase don't match, it's magic
    if (options.nocase && !hasMagic) {
      hasMagic = pattern.toUpperCase() !== pattern.toLowerCase()
    }

    // skip the regexp for non-magical patterns
    // unescape anything in it, though, so that it'll be
    // an exact match against a file etc.
    if (!hasMagic) {
      return globUnescape(pattern)
    }

    const flags = options.nocase ? 'i' : ''
    try {
      return Object.assign(new RegExp('^' + re + '$', flags), {
        _glob: pattern,
        _src: re,
      })
    } catch (er) /* istanbul ignore next - should be impossible */ {
      // If it was an invalid regular expression, then it can't match
      // anything.  This trick looks for a character after the end of
      // the string, which is of course impossible, except in multi-line
      // mode, but it's not a /m regex.
      return new RegExp('$.')
    }
  }
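  // Editorial sketch, not part of the vendored file: parse() returns one of
  // the three shapes that matchOne() branches on:
  //
  //   mm.parse('**')     // => GLOBSTAR sentinel (unless options.noglobstar)
  //   mm.parse('a.txt')  // => 'a.txt' (no magic: compared with ===)
  //   mm.parse('*.js')   // => RegExp with ._glob and ._src attached
  //
  // where `mm` is assumed to be a Minimatch instance; parse() is normally
  // only called internally while building this.set.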

  makeRe () {
    if (this.regexp || this.regexp === false) return this.regexp

    // at this point, this.set is a 2d array of partial
    // pattern strings, or "**".
    //
    // It's better to use .match().  This function shouldn't
    // be used, really, but it's pretty convenient sometimes,
    // when you just want to work with a regex.
    const set = this.set

    if (!set.length) {
      this.regexp = false
      return this.regexp
    }
    const options = this.options

    const twoStar = options.noglobstar ? star
      : options.dot ? twoStarDot
      : twoStarNoDot
    const flags = options.nocase ? 'i' : ''

    // coalesce globstars and regexpify non-globstar patterns
    // if it's the only item, then we just do one twoStar
    // if it's the first, and there are more, prepend (\/|twoStar\/)? to next
    // if it's the last, append (\/twoStar|) to previous
    // if it's in the middle, append (\/|\/twoStar\/) to previous
    // then filter out GLOBSTAR symbols
    let re = set.map(pattern => {
      pattern = pattern.map(p =>
        typeof p === 'string' ? regExpEscape(p)
        : p === GLOBSTAR ? GLOBSTAR
        : p._src
      ).reduce((set, p) => {
        if (!(set[set.length - 1] === GLOBSTAR && p === GLOBSTAR)) {
          set.push(p)
        }
        return set
      }, [])
      pattern.forEach((p, i) => {
        if (p !== GLOBSTAR || pattern[i-1] === GLOBSTAR) {
          return
        }
        if (i === 0) {
          if (pattern.length > 1) {
            pattern[i+1] = '(?:\\\/|' + twoStar + '\\\/)?' + pattern[i+1]
          } else {
            pattern[i] = twoStar
          }
        } else if (i === pattern.length - 1) {
          pattern[i-1] += '(?:\\\/|' + twoStar + ')?'
        } else {
          pattern[i-1] += '(?:\\\/|\\\/' + twoStar + '\\\/)' + pattern[i+1]
          pattern[i+1] = GLOBSTAR
        }
      })
      return pattern.filter(p => p !== GLOBSTAR).join('/')
    }).join('|')

    // must match entire pattern
    // ending in a * or ** will make it less strict.
    re = '^(?:' + re + ')$'

    // can match anything, as long as it's not this.
    if (this.negate) re = '^(?!' + re + ').*$'

    try {
      this.regexp = new RegExp(re, flags)
    } catch (ex) /* istanbul ignore next - should be impossible */ {
      this.regexp = false
    }
    return this.regexp
  }
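  // Editorial sketch: makeRe() collapses this.set into one anchored RegExp
  // and caches it on this.regexp. The exact source text varies by version,
  // but for example:
  //
  //   new minimatch.Minimatch('a/*').makeRe()
  //   // => something like /^(?:a\/(?!\.)(?=.)[^\/]*?)$/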

  match (f, partial = this.partial) {
    this.debug('match', f, this.pattern)
    // short-circuit in the case of busted things.
    // comments, etc.
    if (this.comment) return false
    if (this.empty) return f === ''

    if (f === '/' && partial) return true

    const options = this.options

    // windows: need to use /, not \
    if (path.sep !== '/') {
      f = f.split(path.sep).join('/')
    }

    // treat the test path as a set of pathparts.
    f = f.split(slashSplit)
    this.debug(this.pattern, 'split', f)

    // just ONE of the pattern sets in this.set needs to match
    // in order for it to be valid.  If negating, then just one
    // match means that we have failed.
    // Either way, return on the first hit.

    const set = this.set
    this.debug(this.pattern, 'set', set)

    // Find the basename of the path by looking for the last non-empty segment
    let filename
    for (let i = f.length - 1; i >= 0; i--) {
      filename = f[i]
      if (filename) break
    }

    for (let i = 0; i < set.length; i++) {
      const pattern = set[i]
      let file = f
      if (options.matchBase && pattern.length === 1) {
        file = [filename]
      }
      const hit = this.matchOne(file, pattern, partial)
      if (hit) {
        if (options.flipNegate) return true
        return !this.negate
      }
    }

    // didn't get any hits.  this is success if it's a negative
    // pattern, failure otherwise.
    if (options.flipNegate) return false
    return this.negate
  }
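  // Editorial sketch: match() is the main entry point; it normalizes path
  // separators, splits on /, and tries each row of this.set:
  //
  //   const mm = new minimatch.Minimatch('src/**/*.js')
  //   mm.match('src/lib/util.js')  // => true
  //   mm.match('src/lib/util.ts')  // => false
  //   new minimatch.Minimatch('!src/**').match('src/a.js')  // => false (negated)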

  static defaults (def) {
    return minimatch.defaults(def).Minimatch
  }
}

minimatch.Minimatch = Minimatch
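
// Editorial usage sketch, not part of the vendored file, assuming the
// minimatch 5.x API defined above:
//
//   const minimatch = require('minimatch')
//   minimatch('bar.foo', '*.foo')              // => true
//   minimatch.match(['a.js', 'b.ts'], '*.js')  // => ['a.js']
//   const MM = minimatch.Minimatch.defaults({ nocase: true })
//   new MM('*.JS').match('foo.js')             // => true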
35
desktop-operator/node_modules/cacache/node_modules/minimatch/package.json
generated
vendored
Normal file
@@ -0,0 +1,35 @@
{
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)",
  "name": "minimatch",
  "description": "a glob matcher in javascript",
  "publishConfig": {
    "tag": "legacy-v5"
  },
  "version": "5.1.6",
  "repository": {
    "type": "git",
    "url": "git://github.com/isaacs/minimatch.git"
  },
  "main": "minimatch.js",
  "scripts": {
    "test": "tap",
    "snap": "tap",
    "preversion": "npm test",
    "postversion": "npm publish",
    "prepublishOnly": "git push origin --follow-tags"
  },
  "engines": {
    "node": ">=10"
  },
  "dependencies": {
    "brace-expansion": "^2.0.1"
  },
  "devDependencies": {
    "tap": "^16.3.2"
  },
  "license": "ISC",
  "files": [
    "minimatch.js",
    "lib"
  ]
}
84
desktop-operator/node_modules/cacache/package.json
generated
vendored
Normal file
@@ -0,0 +1,84 @@
{
  "name": "cacache",
  "version": "16.1.3",
  "cache-version": {
    "content": "2",
    "index": "5"
  },
  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
  "main": "lib/index.js",
  "files": [
    "bin/",
    "lib/"
  ],
  "scripts": {
    "preversion": "npm test",
    "postversion": "npm publish",
    "prepublishOnly": "git push origin --follow-tags",
    "test": "tap",
    "snap": "tap",
    "coverage": "tap",
    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
    "lint": "eslint \"**/*.js\"",
    "npmclilint": "npmcli-lint",
    "lintfix": "npm run lint -- --fix",
    "postsnap": "npm run lintfix --",
    "postlint": "template-oss-check",
    "posttest": "npm run lint",
    "template-oss-apply": "template-oss-apply --force"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/npm/cacache.git"
  },
  "keywords": [
    "cache",
    "caching",
    "content-addressable",
    "sri",
    "sri hash",
    "subresource integrity",
    "cache",
    "storage",
    "store",
    "file store",
    "filesystem",
    "disk cache",
    "disk storage"
  ],
  "license": "ISC",
  "dependencies": {
    "@npmcli/fs": "^2.1.0",
    "@npmcli/move-file": "^2.0.0",
    "chownr": "^2.0.0",
    "fs-minipass": "^2.1.0",
    "glob": "^8.0.1",
    "infer-owner": "^1.0.4",
    "lru-cache": "^7.7.1",
    "minipass": "^3.1.6",
    "minipass-collect": "^1.0.2",
    "minipass-flush": "^1.0.5",
    "minipass-pipeline": "^1.2.4",
    "mkdirp": "^1.0.4",
    "p-map": "^4.0.0",
    "promise-inflight": "^1.0.1",
    "rimraf": "^3.0.2",
    "ssri": "^9.0.0",
    "tar": "^6.1.11",
    "unique-filename": "^2.0.0"
  },
  "devDependencies": {
    "@npmcli/eslint-config": "^3.0.1",
    "@npmcli/template-oss": "3.5.0",
    "tap": "^16.0.0"
  },
  "engines": {
    "node": "^12.13.0 || ^14.15.0 || >=16.0.0"
  },
  "templateOSS": {
    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
    "windowsCI": false,
    "version": "3.5.0"
  },
  "author": "GitHub Inc."
}