main funcions fixes
This commit is contained in:
20
desktop-operator/node_modules/@electron/asar/LICENSE.md
generated
vendored
Normal file
20
desktop-operator/node_modules/@electron/asar/LICENSE.md
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
Copyright (c) 2014 GitHub Inc.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining
|
||||
a copy of this software and associated documentation files (the
|
||||
"Software"), to deal in the Software without restriction, including
|
||||
without limitation the rights to use, copy, modify, merge, publish,
|
||||
distribute, sublicense, and/or sell copies of the Software, and to
|
||||
permit persons to whom the Software is furnished to do so, subject to
|
||||
the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be
|
||||
included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
208
desktop-operator/node_modules/@electron/asar/README.md
generated
vendored
Normal file
208
desktop-operator/node_modules/@electron/asar/README.md
generated
vendored
Normal file
@@ -0,0 +1,208 @@
|
||||
# @electron/asar - Electron Archive
|
||||
|
||||
[](https://github.com/electron/asar/actions/workflows/test.yml)
|
||||
[](https://npmjs.org/package/@electron/asar)
|
||||
|
||||
Asar is a simple extensive archive format, it works like `tar` that concatenates
|
||||
all files together without compression, while having random access support.
|
||||
|
||||
## Features
|
||||
|
||||
* Support random access
|
||||
* Use JSON to store files' information
|
||||
* Very easy to write a parser
|
||||
|
||||
## Command line utility
|
||||
|
||||
### Install
|
||||
|
||||
This module requires Node 10 or later.
|
||||
|
||||
```bash
|
||||
$ npm install --engine-strict @electron/asar
|
||||
```
|
||||
|
||||
### Usage
|
||||
|
||||
```bash
|
||||
$ asar --help
|
||||
|
||||
Usage: asar [options] [command]
|
||||
|
||||
Commands:
|
||||
|
||||
pack|p <dir> <output>
|
||||
create asar archive
|
||||
|
||||
list|l <archive>
|
||||
list files of asar archive
|
||||
|
||||
extract-file|ef <archive> <filename>
|
||||
extract one file from archive
|
||||
|
||||
extract|e <archive> <dest>
|
||||
extract archive
|
||||
|
||||
|
||||
Options:
|
||||
|
||||
-h, --help output usage information
|
||||
-V, --version output the version number
|
||||
|
||||
```
|
||||
|
||||
#### Excluding multiple resources from being packed
|
||||
|
||||
Given:
|
||||
```
|
||||
app
|
||||
(a) ├── x1
|
||||
(b) ├── x2
|
||||
(c) ├── y3
|
||||
(d) │ ├── x1
|
||||
(e) │ └── z1
|
||||
(f) │ └── x2
|
||||
(g) └── z4
|
||||
(h) └── w1
|
||||
```
|
||||
|
||||
Exclude: a, b
|
||||
```bash
|
||||
$ asar pack app app.asar --unpack-dir "{x1,x2}"
|
||||
```
|
||||
|
||||
Exclude: a, b, d, f
|
||||
```bash
|
||||
$ asar pack app app.asar --unpack-dir "**/{x1,x2}"
|
||||
```
|
||||
|
||||
Exclude: a, b, d, f, h
|
||||
```bash
|
||||
$ asar pack app app.asar --unpack-dir "{**/x1,**/x2,z4/w1}"
|
||||
```
|
||||
|
||||
## Using programmatically
|
||||
|
||||
### Example
|
||||
|
||||
```javascript
|
||||
const asar = require('@electron/asar');
|
||||
|
||||
const src = 'some/path/';
|
||||
const dest = 'name.asar';
|
||||
|
||||
await asar.createPackage(src, dest);
|
||||
console.log('done.');
|
||||
```
|
||||
|
||||
Please note that there is currently **no** error handling provided!
|
||||
|
||||
### Transform
|
||||
You can pass in a `transform` option, that is a function, which either returns
|
||||
nothing, or a `stream.Transform`. The latter will be used on files that will be
|
||||
in the `.asar` file to transform them (e.g. compress).
|
||||
|
||||
```javascript
|
||||
const asar = require('@electron/asar');
|
||||
|
||||
const src = 'some/path/';
|
||||
const dest = 'name.asar';
|
||||
|
||||
function transform (filename) {
|
||||
return new CustomTransformStream()
|
||||
}
|
||||
|
||||
await asar.createPackageWithOptions(src, dest, { transform: transform });
|
||||
console.log('done.');
|
||||
```
|
||||
|
||||
## Format
|
||||
|
||||
Asar uses [Pickle][pickle] to safely serialize binary value to file.
|
||||
|
||||
The format of asar is very flat:
|
||||
|
||||
```
|
||||
| UInt32: header_size | String: header | Bytes: file1 | ... | Bytes: file42 |
|
||||
```
|
||||
|
||||
The `header_size` and `header` are serialized with [Pickle][pickle] class, and
|
||||
`header_size`'s [Pickle][pickle] object is 8 bytes.
|
||||
|
||||
The `header` is a JSON string, and the `header_size` is the size of `header`'s
|
||||
`Pickle` object.
|
||||
|
||||
Structure of `header` is something like this:
|
||||
|
||||
```json
|
||||
{
|
||||
"files": {
|
||||
"tmp": {
|
||||
"files": {}
|
||||
},
|
||||
"usr" : {
|
||||
"files": {
|
||||
"bin": {
|
||||
"files": {
|
||||
"ls": {
|
||||
"offset": "0",
|
||||
"size": 100,
|
||||
"executable": true,
|
||||
"integrity": {
|
||||
"algorithm": "SHA256",
|
||||
"hash": "...",
|
||||
"blockSize": 1024,
|
||||
"blocks": ["...", "..."]
|
||||
}
|
||||
},
|
||||
"cd": {
|
||||
"offset": "100",
|
||||
"size": 100,
|
||||
"executable": true,
|
||||
"integrity": {
|
||||
"algorithm": "SHA256",
|
||||
"hash": "...",
|
||||
"blockSize": 1024,
|
||||
"blocks": ["...", "..."]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"etc": {
|
||||
"files": {
|
||||
"hosts": {
|
||||
"offset": "200",
|
||||
"size": 32,
|
||||
"integrity": {
|
||||
"algorithm": "SHA256",
|
||||
"hash": "...",
|
||||
"blockSize": 1024,
|
||||
"blocks": ["...", "..."]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
`offset` and `size` records the information to read the file from archive, the
|
||||
`offset` starts from 0 so you have to manually add the size of `header_size` and
|
||||
`header` to the `offset` to get the real offset of the file.
|
||||
|
||||
`offset` is a UINT64 number represented in string, because there is no way to
|
||||
precisely represent UINT64 in JavaScript `Number`. `size` is a JavaScript
|
||||
`Number` that is no larger than `Number.MAX_SAFE_INTEGER`, which has a value of
|
||||
`9007199254740991` and is about 8PB in size. We didn't store `size` in UINT64
|
||||
because file size in Node.js is represented as `Number` and it is not safe to
|
||||
convert `Number` to UINT64.
|
||||
|
||||
`integrity` is an object consisting of a few keys:
|
||||
* A hashing `algorithm`, currently only `SHA256` is supported.
|
||||
* A hex encoded `hash` value representing the hash of the entire file.
|
||||
* An array of hex encoded hashes for the `blocks` of the file. i.e. for a blockSize of 4KB this array contains the hash of every block if you split the file into N 4KB blocks.
|
||||
* A integer value `blockSize` representing the size in bytes of each block in the `blocks` hashes above
|
||||
|
||||
[pickle]: https://chromium.googlesource.com/chromium/src/+/main/base/pickle.h
|
||||
82
desktop-operator/node_modules/@electron/asar/bin/asar.js
generated
vendored
Executable file
82
desktop-operator/node_modules/@electron/asar/bin/asar.js
generated
vendored
Executable file
@@ -0,0 +1,82 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
var packageJSON = require('../package.json')
|
||||
var splitVersion = function (version) { return version.split('.').map(function (part) { return Number(part) }) }
|
||||
var requiredNodeVersion = splitVersion(packageJSON.engines.node.slice(2))
|
||||
var actualNodeVersion = splitVersion(process.versions.node)
|
||||
|
||||
if (actualNodeVersion[0] < requiredNodeVersion[0] || (actualNodeVersion[0] === requiredNodeVersion[0] && actualNodeVersion[1] < requiredNodeVersion[1])) {
|
||||
console.error('CANNOT RUN WITH NODE ' + process.versions.node)
|
||||
console.error('asar requires Node ' + packageJSON.engines.node + '.')
|
||||
process.exit(1)
|
||||
}
|
||||
|
||||
// Not consts so that this file can load in Node < 4.0
|
||||
var asar = require('../lib/asar')
|
||||
var program = require('commander')
|
||||
|
||||
program.version('v' + packageJSON.version)
|
||||
.description('Manipulate asar archive files')
|
||||
|
||||
program.command('pack <dir> <output>')
|
||||
.alias('p')
|
||||
.description('create asar archive')
|
||||
.option('--ordering <file path>', 'path to a text file for ordering contents')
|
||||
.option('--unpack <expression>', 'do not pack files matching glob <expression>')
|
||||
.option('--unpack-dir <expression>', 'do not pack dirs matching glob <expression> or starting with literal <expression>')
|
||||
.option('--exclude-hidden', 'exclude hidden files')
|
||||
.action(function (dir, output, options) {
|
||||
options = {
|
||||
unpack: options.unpack,
|
||||
unpackDir: options.unpackDir,
|
||||
ordering: options.ordering,
|
||||
version: options.sv,
|
||||
arch: options.sa,
|
||||
builddir: options.sb,
|
||||
dot: !options.excludeHidden
|
||||
}
|
||||
asar.createPackageWithOptions(dir, output, options).catch(error => {
|
||||
console.error(error)
|
||||
process.exit(1)
|
||||
})
|
||||
})
|
||||
|
||||
program.command('list <archive>')
|
||||
.alias('l')
|
||||
.description('list files of asar archive')
|
||||
.option('-i, --is-pack', 'each file in the asar is pack or unpack')
|
||||
.action(function (archive, options) {
|
||||
options = {
|
||||
isPack: options.isPack
|
||||
}
|
||||
var files = asar.listPackage(archive, options)
|
||||
for (var i in files) {
|
||||
console.log(files[i])
|
||||
}
|
||||
})
|
||||
|
||||
program.command('extract-file <archive> <filename>')
|
||||
.alias('ef')
|
||||
.description('extract one file from archive')
|
||||
.action(function (archive, filename) {
|
||||
require('fs').writeFileSync(require('path').basename(filename),
|
||||
asar.extractFile(archive, filename))
|
||||
})
|
||||
|
||||
program.command('extract <archive> <dest>')
|
||||
.alias('e')
|
||||
.description('extract archive')
|
||||
.action(function (archive, dest) {
|
||||
asar.extractAll(archive, dest)
|
||||
})
|
||||
|
||||
program.command('*')
|
||||
.action(function (_cmd, args) {
|
||||
console.log('asar: \'%s\' is not an asar command. See \'asar --help\'.', args[0])
|
||||
})
|
||||
|
||||
program.parse(process.argv)
|
||||
|
||||
if (program.args.length === 0) {
|
||||
program.help()
|
||||
}
|
||||
96
desktop-operator/node_modules/@electron/asar/lib/asar.d.ts
generated
vendored
Normal file
96
desktop-operator/node_modules/@electron/asar/lib/asar.d.ts
generated
vendored
Normal file
@@ -0,0 +1,96 @@
|
||||
import { FilesystemDirectoryEntry, FilesystemEntry, FilesystemLinkEntry } from './filesystem';
|
||||
import * as disk from './disk';
|
||||
import { CrawledFileType } from './crawlfs';
|
||||
import { IOptions } from './types/glob';
|
||||
export declare function createPackage(src: string, dest: string): Promise<NodeJS.WritableStream>;
|
||||
export type CreateOptions = {
|
||||
dot?: boolean;
|
||||
globOptions?: IOptions;
|
||||
/**
|
||||
* Path to a file containing the list of relative filepaths relative to `src` and the specific order they should be inserted into the asar.
|
||||
* Formats allowed below:
|
||||
* filepath
|
||||
* : filepath
|
||||
* <anything>:filepath
|
||||
*/
|
||||
ordering?: string;
|
||||
pattern?: string;
|
||||
transform?: (filePath: string) => NodeJS.ReadWriteStream | void;
|
||||
unpack?: string;
|
||||
unpackDir?: string;
|
||||
};
|
||||
export declare function createPackageWithOptions(src: string, dest: string, options: CreateOptions): Promise<NodeJS.WritableStream>;
|
||||
/**
|
||||
* Create an ASAR archive from a list of filenames.
|
||||
*
|
||||
* @param src - Base path. All files are relative to this.
|
||||
* @param dest - Archive filename (& path).
|
||||
* @param filenames - List of filenames relative to src.
|
||||
* @param [metadata] - Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional)
|
||||
* @param [options] - Options passed to `createPackageWithOptions`.
|
||||
*/
|
||||
export declare function createPackageFromFiles(src: string, dest: string, filenames: string[], metadata?: disk.InputMetadata, options?: CreateOptions): Promise<NodeJS.WritableStream>;
|
||||
export type AsarStream = {
|
||||
/**
|
||||
Relative path to the file or directory from within the archive
|
||||
*/
|
||||
path: string;
|
||||
/**
|
||||
Function that returns a read stream for a file.
|
||||
Note: this is called multiple times per "file", so a new NodeJS.ReadableStream needs to be created each time
|
||||
*/
|
||||
streamGenerator: () => NodeJS.ReadableStream;
|
||||
/**
|
||||
Whether the file/link should be unpacked
|
||||
*/
|
||||
unpacked: boolean;
|
||||
stat: CrawledFileType['stat'];
|
||||
};
|
||||
export type AsarDirectory = Pick<AsarStream, 'path' | 'unpacked'> & {
|
||||
type: 'directory';
|
||||
};
|
||||
export type AsarSymlinkStream = AsarStream & {
|
||||
type: 'link';
|
||||
symlink: string;
|
||||
};
|
||||
export type AsarFileStream = AsarStream & {
|
||||
type: 'file';
|
||||
};
|
||||
export type AsarStreamType = AsarDirectory | AsarFileStream | AsarSymlinkStream;
|
||||
/**
|
||||
* Create an ASAR archive from a list of streams.
|
||||
*
|
||||
* @param dest - Archive filename (& path).
|
||||
* @param streams - List of streams to be piped in-memory into asar filesystem. Insertion order is preserved.
|
||||
*/
|
||||
export declare function createPackageFromStreams(dest: string, streams: AsarStreamType[]): Promise<import("fs").WriteStream>;
|
||||
export declare function statFile(archivePath: string, filename: string, followLinks?: boolean): FilesystemEntry;
|
||||
export declare function getRawHeader(archivePath: string): disk.ArchiveHeader;
|
||||
export interface ListOptions {
|
||||
isPack: boolean;
|
||||
}
|
||||
export declare function listPackage(archivePath: string, options: ListOptions): string[];
|
||||
export declare function extractFile(archivePath: string, filename: string, followLinks?: boolean): Buffer;
|
||||
export declare function extractAll(archivePath: string, dest: string): void;
|
||||
export declare function uncache(archivePath: string): boolean;
|
||||
export declare function uncacheAll(): void;
|
||||
export { EntryMetadata } from './filesystem';
|
||||
export { InputMetadata, DirectoryRecord, FileRecord, ArchiveHeader } from './disk';
|
||||
export type InputMetadataType = 'directory' | 'file' | 'link';
|
||||
export type DirectoryMetadata = FilesystemDirectoryEntry;
|
||||
export type FileMetadata = FilesystemEntry;
|
||||
export type LinkMetadata = FilesystemLinkEntry;
|
||||
declare const _default: {
|
||||
createPackage: typeof createPackage;
|
||||
createPackageWithOptions: typeof createPackageWithOptions;
|
||||
createPackageFromFiles: typeof createPackageFromFiles;
|
||||
createPackageFromStreams: typeof createPackageFromStreams;
|
||||
statFile: typeof statFile;
|
||||
getRawHeader: typeof getRawHeader;
|
||||
listPackage: typeof listPackage;
|
||||
extractFile: typeof extractFile;
|
||||
extractAll: typeof extractAll;
|
||||
uncache: typeof uncache;
|
||||
uncacheAll: typeof uncacheAll;
|
||||
};
|
||||
export default _default;
|
||||
337
desktop-operator/node_modules/@electron/asar/lib/asar.js
generated
vendored
Normal file
337
desktop-operator/node_modules/@electron/asar/lib/asar.js
generated
vendored
Normal file
@@ -0,0 +1,337 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createPackage = createPackage;
|
||||
exports.createPackageWithOptions = createPackageWithOptions;
|
||||
exports.createPackageFromFiles = createPackageFromFiles;
|
||||
exports.createPackageFromStreams = createPackageFromStreams;
|
||||
exports.statFile = statFile;
|
||||
exports.getRawHeader = getRawHeader;
|
||||
exports.listPackage = listPackage;
|
||||
exports.extractFile = extractFile;
|
||||
exports.extractAll = extractAll;
|
||||
exports.uncache = uncache;
|
||||
exports.uncacheAll = uncacheAll;
|
||||
const path = __importStar(require("path"));
|
||||
const minimatch_1 = __importDefault(require("minimatch"));
|
||||
const wrapped_fs_1 = __importDefault(require("./wrapped-fs"));
|
||||
const filesystem_1 = require("./filesystem");
|
||||
const disk = __importStar(require("./disk"));
|
||||
const crawlfs_1 = require("./crawlfs");
|
||||
/**
|
||||
* Whether a directory should be excluded from packing due to the `--unpack-dir" option.
|
||||
*
|
||||
* @param dirPath - directory path to check
|
||||
* @param pattern - literal prefix [for backward compatibility] or glob pattern
|
||||
* @param unpackDirs - Array of directory paths previously marked as unpacked
|
||||
*/
|
||||
function isUnpackedDir(dirPath, pattern, unpackDirs) {
|
||||
if (dirPath.startsWith(pattern) || (0, minimatch_1.default)(dirPath, pattern)) {
|
||||
if (!unpackDirs.includes(dirPath)) {
|
||||
unpackDirs.push(dirPath);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
else {
|
||||
return unpackDirs.some((unpackDir) => dirPath.startsWith(unpackDir) && !path.relative(unpackDir, dirPath).startsWith('..'));
|
||||
}
|
||||
}
|
||||
async function createPackage(src, dest) {
|
||||
return createPackageWithOptions(src, dest, {});
|
||||
}
|
||||
async function createPackageWithOptions(src, dest, options) {
|
||||
const globOptions = options.globOptions ? options.globOptions : {};
|
||||
globOptions.dot = options.dot === undefined ? true : options.dot;
|
||||
const pattern = src + (options.pattern ? options.pattern : '/**/*');
|
||||
const [filenames, metadata] = await (0, crawlfs_1.crawl)(pattern, globOptions);
|
||||
return createPackageFromFiles(src, dest, filenames, metadata, options);
|
||||
}
|
||||
/**
|
||||
* Create an ASAR archive from a list of filenames.
|
||||
*
|
||||
* @param src - Base path. All files are relative to this.
|
||||
* @param dest - Archive filename (& path).
|
||||
* @param filenames - List of filenames relative to src.
|
||||
* @param [metadata] - Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional)
|
||||
* @param [options] - Options passed to `createPackageWithOptions`.
|
||||
*/
|
||||
async function createPackageFromFiles(src, dest, filenames, metadata = {}, options = {}) {
|
||||
src = path.normalize(src);
|
||||
dest = path.normalize(dest);
|
||||
filenames = filenames.map(function (filename) {
|
||||
return path.normalize(filename);
|
||||
});
|
||||
const filesystem = new filesystem_1.Filesystem(src);
|
||||
const files = [];
|
||||
const links = [];
|
||||
const unpackDirs = [];
|
||||
let filenamesSorted = [];
|
||||
if (options.ordering) {
|
||||
const orderingFiles = (await wrapped_fs_1.default.readFile(options.ordering))
|
||||
.toString()
|
||||
.split('\n')
|
||||
.map((line) => {
|
||||
if (line.includes(':')) {
|
||||
line = line.split(':').pop();
|
||||
}
|
||||
line = line.trim();
|
||||
if (line.startsWith('/')) {
|
||||
line = line.slice(1);
|
||||
}
|
||||
return line;
|
||||
});
|
||||
const ordering = [];
|
||||
for (const file of orderingFiles) {
|
||||
const pathComponents = file.split(path.sep);
|
||||
let str = src;
|
||||
for (const pathComponent of pathComponents) {
|
||||
str = path.join(str, pathComponent);
|
||||
ordering.push(str);
|
||||
}
|
||||
}
|
||||
let missing = 0;
|
||||
const total = filenames.length;
|
||||
for (const file of ordering) {
|
||||
if (!filenamesSorted.includes(file) && filenames.includes(file)) {
|
||||
filenamesSorted.push(file);
|
||||
}
|
||||
}
|
||||
for (const file of filenames) {
|
||||
if (!filenamesSorted.includes(file)) {
|
||||
filenamesSorted.push(file);
|
||||
missing += 1;
|
||||
}
|
||||
}
|
||||
console.log(`Ordering file has ${((total - missing) / total) * 100}% coverage.`);
|
||||
}
|
||||
else {
|
||||
filenamesSorted = filenames;
|
||||
}
|
||||
const handleFile = async function (filename) {
|
||||
if (!metadata[filename]) {
|
||||
const fileType = await (0, crawlfs_1.determineFileType)(filename);
|
||||
if (!fileType) {
|
||||
throw new Error('Unknown file type for file: ' + filename);
|
||||
}
|
||||
metadata[filename] = fileType;
|
||||
}
|
||||
const file = metadata[filename];
|
||||
const shouldUnpackPath = function (relativePath, unpack, unpackDir) {
|
||||
let shouldUnpack = false;
|
||||
if (unpack) {
|
||||
shouldUnpack = (0, minimatch_1.default)(filename, unpack, { matchBase: true });
|
||||
}
|
||||
if (!shouldUnpack && unpackDir) {
|
||||
shouldUnpack = isUnpackedDir(relativePath, unpackDir, unpackDirs);
|
||||
}
|
||||
return shouldUnpack;
|
||||
};
|
||||
let shouldUnpack;
|
||||
switch (file.type) {
|
||||
case 'directory':
|
||||
shouldUnpack = shouldUnpackPath(path.relative(src, filename), undefined, options.unpackDir);
|
||||
filesystem.insertDirectory(filename, shouldUnpack);
|
||||
break;
|
||||
case 'file':
|
||||
shouldUnpack = shouldUnpackPath(path.relative(src, path.dirname(filename)), options.unpack, options.unpackDir);
|
||||
files.push({ filename, unpack: shouldUnpack });
|
||||
return filesystem.insertFile(filename, () => wrapped_fs_1.default.createReadStream(filename), shouldUnpack, file, options);
|
||||
case 'link':
|
||||
shouldUnpack = shouldUnpackPath(path.relative(src, filename), options.unpack, options.unpackDir);
|
||||
links.push({ filename, unpack: shouldUnpack });
|
||||
filesystem.insertLink(filename, shouldUnpack);
|
||||
break;
|
||||
}
|
||||
return Promise.resolve();
|
||||
};
|
||||
const insertsDone = async function () {
|
||||
await wrapped_fs_1.default.mkdirp(path.dirname(dest));
|
||||
return disk.writeFilesystem(dest, filesystem, { files, links }, metadata);
|
||||
};
|
||||
const names = filenamesSorted.slice();
|
||||
const next = async function (name) {
|
||||
if (!name) {
|
||||
return insertsDone();
|
||||
}
|
||||
await handleFile(name);
|
||||
return next(names.shift());
|
||||
};
|
||||
return next(names.shift());
|
||||
}
|
||||
/**
|
||||
* Create an ASAR archive from a list of streams.
|
||||
*
|
||||
* @param dest - Archive filename (& path).
|
||||
* @param streams - List of streams to be piped in-memory into asar filesystem. Insertion order is preserved.
|
||||
*/
|
||||
async function createPackageFromStreams(dest, streams) {
|
||||
// We use an ambiguous root `src` since we're piping directly from a stream and the `filePath` for the stream is already relative to the src/root
|
||||
const src = '.';
|
||||
const filesystem = new filesystem_1.Filesystem(src);
|
||||
const files = [];
|
||||
const links = [];
|
||||
const handleFile = async function (stream) {
|
||||
const { path: destinationPath, type } = stream;
|
||||
const filename = path.normalize(destinationPath);
|
||||
switch (type) {
|
||||
case 'directory':
|
||||
filesystem.insertDirectory(filename, stream.unpacked);
|
||||
break;
|
||||
case 'file':
|
||||
files.push({
|
||||
filename,
|
||||
streamGenerator: stream.streamGenerator,
|
||||
link: undefined,
|
||||
mode: stream.stat.mode,
|
||||
unpack: stream.unpacked,
|
||||
});
|
||||
return filesystem.insertFile(filename, stream.streamGenerator, stream.unpacked, {
|
||||
type: 'file',
|
||||
stat: stream.stat,
|
||||
});
|
||||
case 'link':
|
||||
links.push({
|
||||
filename,
|
||||
streamGenerator: stream.streamGenerator,
|
||||
link: stream.symlink,
|
||||
mode: stream.stat.mode,
|
||||
unpack: stream.unpacked,
|
||||
});
|
||||
filesystem.insertLink(filename, stream.unpacked, path.dirname(filename), stream.symlink, src);
|
||||
break;
|
||||
}
|
||||
return Promise.resolve();
|
||||
};
|
||||
const insertsDone = async function () {
|
||||
await wrapped_fs_1.default.mkdirp(path.dirname(dest));
|
||||
return disk.streamFilesystem(dest, filesystem, { files, links });
|
||||
};
|
||||
const streamQueue = streams.slice();
|
||||
const next = async function (stream) {
|
||||
if (!stream) {
|
||||
return insertsDone();
|
||||
}
|
||||
await handleFile(stream);
|
||||
return next(streamQueue.shift());
|
||||
};
|
||||
return next(streamQueue.shift());
|
||||
}
|
||||
function statFile(archivePath, filename, followLinks = true) {
|
||||
const filesystem = disk.readFilesystemSync(archivePath);
|
||||
return filesystem.getFile(filename, followLinks);
|
||||
}
|
||||
function getRawHeader(archivePath) {
|
||||
return disk.readArchiveHeaderSync(archivePath);
|
||||
}
|
||||
function listPackage(archivePath, options) {
|
||||
return disk.readFilesystemSync(archivePath).listFiles(options);
|
||||
}
|
||||
function extractFile(archivePath, filename, followLinks = true) {
|
||||
const filesystem = disk.readFilesystemSync(archivePath);
|
||||
const fileInfo = filesystem.getFile(filename, followLinks);
|
||||
if ('link' in fileInfo || 'files' in fileInfo) {
|
||||
throw new Error('Expected to find file at: ' + filename + ' but found a directory or link');
|
||||
}
|
||||
return disk.readFileSync(filesystem, filename, fileInfo);
|
||||
}
|
||||
function extractAll(archivePath, dest) {
|
||||
const filesystem = disk.readFilesystemSync(archivePath);
|
||||
const filenames = filesystem.listFiles();
|
||||
// under windows just extract links as regular files
|
||||
const followLinks = process.platform === 'win32';
|
||||
// create destination directory
|
||||
wrapped_fs_1.default.mkdirpSync(dest);
|
||||
const extractionErrors = [];
|
||||
for (const fullPath of filenames) {
|
||||
// Remove leading slash
|
||||
const filename = fullPath.substr(1);
|
||||
const destFilename = path.join(dest, filename);
|
||||
const file = filesystem.getFile(filename, followLinks);
|
||||
if (path.relative(dest, destFilename).startsWith('..')) {
|
||||
throw new Error(`${fullPath}: file "${destFilename}" writes out of the package`);
|
||||
}
|
||||
if ('files' in file) {
|
||||
// it's a directory, create it and continue with the next entry
|
||||
wrapped_fs_1.default.mkdirpSync(destFilename);
|
||||
}
|
||||
else if ('link' in file) {
|
||||
// it's a symlink, create a symlink
|
||||
const linkSrcPath = path.dirname(path.join(dest, file.link));
|
||||
const linkDestPath = path.dirname(destFilename);
|
||||
const relativePath = path.relative(linkDestPath, linkSrcPath);
|
||||
// try to delete output file, because we can't overwrite a link
|
||||
try {
|
||||
wrapped_fs_1.default.unlinkSync(destFilename);
|
||||
}
|
||||
catch (_a) { }
|
||||
const linkTo = path.join(relativePath, path.basename(file.link));
|
||||
if (path.relative(dest, linkSrcPath).startsWith('..')) {
|
||||
throw new Error(`${fullPath}: file "${file.link}" links out of the package to "${linkSrcPath}"`);
|
||||
}
|
||||
wrapped_fs_1.default.symlinkSync(linkTo, destFilename);
|
||||
}
|
||||
else {
|
||||
// it's a file, try to extract it
|
||||
try {
|
||||
const content = disk.readFileSync(filesystem, filename, file);
|
||||
wrapped_fs_1.default.writeFileSync(destFilename, content);
|
||||
if (file.executable) {
|
||||
wrapped_fs_1.default.chmodSync(destFilename, '755');
|
||||
}
|
||||
}
|
||||
catch (e) {
|
||||
extractionErrors.push(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (extractionErrors.length) {
|
||||
throw new Error('Unable to extract some files:\n\n' +
|
||||
extractionErrors.map((error) => error.stack).join('\n\n'));
|
||||
}
|
||||
}
|
||||
function uncache(archivePath) {
|
||||
return disk.uncacheFilesystem(archivePath);
|
||||
}
|
||||
function uncacheAll() {
|
||||
disk.uncacheAll();
|
||||
}
|
||||
// Export everything in default, too
|
||||
exports.default = {
|
||||
createPackage,
|
||||
createPackageWithOptions,
|
||||
createPackageFromFiles,
|
||||
createPackageFromStreams,
|
||||
statFile,
|
||||
getRawHeader,
|
||||
listPackage,
|
||||
extractFile,
|
||||
extractAll,
|
||||
uncache,
|
||||
uncacheAll,
|
||||
};
|
||||
//# sourceMappingURL=asar.js.map
|
||||
1
desktop-operator/node_modules/@electron/asar/lib/asar.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/asar/lib/asar.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
12
desktop-operator/node_modules/@electron/asar/lib/crawlfs.d.ts
generated
vendored
Normal file
12
desktop-operator/node_modules/@electron/asar/lib/crawlfs.d.ts
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
import { Stats } from 'fs';
|
||||
import { IOptions } from './types/glob';
|
||||
export type CrawledFileType = {
|
||||
type: 'file' | 'directory' | 'link';
|
||||
stat: Pick<Stats, 'mode' | 'size'>;
|
||||
transformed?: {
|
||||
path: string;
|
||||
stat: Stats;
|
||||
};
|
||||
};
|
||||
export declare function determineFileType(filename: string): Promise<CrawledFileType | null>;
|
||||
export declare function crawl(dir: string, options: IOptions): Promise<readonly [string[], Record<string, CrawledFileType>]>;
|
||||
79
desktop-operator/node_modules/@electron/asar/lib/crawlfs.js
generated
vendored
Normal file
79
desktop-operator/node_modules/@electron/asar/lib/crawlfs.js
generated
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.determineFileType = determineFileType;
|
||||
exports.crawl = crawl;
|
||||
const util_1 = require("util");
|
||||
const glob_1 = require("glob");
|
||||
const wrapped_fs_1 = __importDefault(require("./wrapped-fs"));
|
||||
const path = __importStar(require("path"));
|
||||
const glob = (0, util_1.promisify)(glob_1.glob);
|
||||
async function determineFileType(filename) {
|
||||
const stat = await wrapped_fs_1.default.lstat(filename);
|
||||
if (stat.isFile()) {
|
||||
return { type: 'file', stat };
|
||||
}
|
||||
else if (stat.isDirectory()) {
|
||||
return { type: 'directory', stat };
|
||||
}
|
||||
else if (stat.isSymbolicLink()) {
|
||||
return { type: 'link', stat };
|
||||
}
|
||||
return null;
|
||||
}
|
||||
async function crawl(dir, options) {
|
||||
const metadata = {};
|
||||
const crawled = await glob(dir, options);
|
||||
const results = await Promise.all(crawled.map(async (filename) => [filename, await determineFileType(filename)]));
|
||||
const links = [];
|
||||
const filenames = results
|
||||
.map(([filename, type]) => {
|
||||
if (type) {
|
||||
metadata[filename] = type;
|
||||
if (type.type === 'link')
|
||||
links.push(filename);
|
||||
}
|
||||
return filename;
|
||||
})
|
||||
.filter((filename) => {
|
||||
// Newer glob can return files inside symlinked directories, to avoid
|
||||
// those appearing in archives we need to manually exclude theme here
|
||||
const exactLinkIndex = links.findIndex((link) => filename === link);
|
||||
return links.every((link, index) => {
|
||||
if (index === exactLinkIndex) {
|
||||
return true;
|
||||
}
|
||||
const isFileWithinSymlinkDir = filename.startsWith(link);
|
||||
// symlink may point outside the directory: https://github.com/electron/asar/issues/303
|
||||
const relativePath = path.relative(link, path.dirname(filename));
|
||||
return !isFileWithinSymlinkDir || relativePath.startsWith('..');
|
||||
});
|
||||
});
|
||||
return [filenames, metadata];
|
||||
}
|
||||
//# sourceMappingURL=crawlfs.js.map
|
||||
1
desktop-operator/node_modules/@electron/asar/lib/crawlfs.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/asar/lib/crawlfs.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"crawlfs.js","sourceRoot":"","sources":["../src/crawlfs.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAmBA,8CAUC;AAED,sBA8BC;AA7DD,+BAAiC;AACjC,+BAAqC;AAErC,8DAA8B;AAE9B,2CAA6B;AAG7B,MAAM,IAAI,GAAG,IAAA,gBAAS,EAAC,WAAK,CAAC,CAAC;AAWvB,KAAK,UAAU,iBAAiB,CAAC,QAAgB;IACtD,MAAM,IAAI,GAAG,MAAM,oBAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;IACtC,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;QAClB,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;IAChC,CAAC;SAAM,IAAI,IAAI,CAAC,WAAW,EAAE,EAAE,CAAC;QAC9B,OAAO,EAAE,IAAI,EAAE,WAAW,EAAE,IAAI,EAAE,CAAC;IACrC,CAAC;SAAM,IAAI,IAAI,CAAC,cAAc,EAAE,EAAE,CAAC;QACjC,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;IAChC,CAAC;IACD,OAAO,IAAI,CAAC;AACd,CAAC;AAEM,KAAK,UAAU,KAAK,CAAC,GAAW,EAAE,OAAiB;IACxD,MAAM,QAAQ,GAAoC,EAAE,CAAC;IACrD,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;IACzC,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC,GAAG,CAC/B,OAAO,CAAC,GAAG,CAAC,KAAK,EAAE,QAAQ,EAAE,EAAE,CAAC,CAAC,QAAQ,EAAE,MAAM,iBAAiB,CAAC,QAAQ,CAAC,CAAU,CAAC,CACxF,CAAC;IACF,MAAM,KAAK,GAAa,EAAE,CAAC;IAC3B,MAAM,SAAS,GAAG,OAAO;SACtB,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,IAAI,CAAC,EAAE,EAAE;QACxB,IAAI,IAAI,EAAE,CAAC;YACT,QAAQ,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC;YAC1B,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM;gBAAE,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QACjD,CAAC;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC,CAAC;SACD,MAAM,CAAC,CAAC,QAAQ,EAAE,EAAE;QACnB,qEAAqE;QACrE,qEAAqE;QACrE,MAAM,cAAc,GAAG,KAAK,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,QAAQ,KAAK,IAAI,CAAC,CAAC;QACpE,OAAO,KAAK,CAAC,KAAK,CAAC,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;YACjC,IAAI,KAAK,KAAK,cAAc,EAAE,CAAC;gBAC7B,OAAO,IAAI,CAAC;YACd,CAAC;YACD,MAAM,sBAAsB,GAAG,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;YACzD,uFAAuF;YACvF,MAAM,YAAY,GAAG,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC;YACjE,OAAO,CAAC,sBAAsB,IAAI,YAAY,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QAClE,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;IACL,OAAO,CAAC,SAAS,EAAE,QAAQ,CAAU,CAAC;AACxC,CAAC"}
|
||||
44
desktop-operator/node_modules/@electron/asar/lib/disk.d.ts
generated
vendored
Normal file
44
desktop-operator/node_modules/@electron/asar/lib/disk.d.ts
generated
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
import { Filesystem, FilesystemFileEntry } from './filesystem';
|
||||
import { CrawledFileType } from './crawlfs';
|
||||
import { Stats } from 'fs';
|
||||
export type InputMetadata = {
|
||||
[property: string]: CrawledFileType;
|
||||
};
|
||||
export type BasicFilesArray = {
|
||||
filename: string;
|
||||
unpack: boolean;
|
||||
}[];
|
||||
export type BasicStreamArray = {
|
||||
filename: string;
|
||||
streamGenerator: () => NodeJS.ReadableStream;
|
||||
mode: Stats['mode'];
|
||||
unpack: boolean;
|
||||
link: string | undefined;
|
||||
}[];
|
||||
export type FilesystemFilesAndLinks<T extends BasicFilesArray | BasicStreamArray> = {
|
||||
files: T;
|
||||
links: T;
|
||||
};
|
||||
export declare function writeFilesystem(dest: string, filesystem: Filesystem, lists: FilesystemFilesAndLinks<BasicFilesArray>, metadata: InputMetadata): Promise<NodeJS.WritableStream>;
|
||||
export declare function streamFilesystem(dest: string, filesystem: Filesystem, lists: FilesystemFilesAndLinks<BasicStreamArray>): Promise<import("fs").WriteStream>;
|
||||
export interface FileRecord extends FilesystemFileEntry {
|
||||
integrity: {
|
||||
hash: string;
|
||||
algorithm: 'SHA256';
|
||||
blocks: string[];
|
||||
blockSize: number;
|
||||
};
|
||||
}
|
||||
export type DirectoryRecord = {
|
||||
files: Record<string, DirectoryRecord | FileRecord>;
|
||||
};
|
||||
export type ArchiveHeader = {
|
||||
header: DirectoryRecord;
|
||||
headerString: string;
|
||||
headerSize: number;
|
||||
};
|
||||
export declare function readArchiveHeaderSync(archivePath: string): ArchiveHeader;
|
||||
export declare function readFilesystemSync(archivePath: string): Filesystem;
|
||||
export declare function uncacheFilesystem(archivePath: string): boolean;
|
||||
export declare function uncacheAll(): void;
|
||||
export declare function readFileSync(filesystem: Filesystem, filename: string, info: FilesystemFileEntry): Buffer;
|
||||
219
desktop-operator/node_modules/@electron/asar/lib/disk.js
generated
vendored
Normal file
219
desktop-operator/node_modules/@electron/asar/lib/disk.js
generated
vendored
Normal file
@@ -0,0 +1,219 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __asyncValues = (this && this.__asyncValues) || function (o) {
|
||||
if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
|
||||
var m = o[Symbol.asyncIterator], i;
|
||||
return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
|
||||
function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
|
||||
function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.writeFilesystem = writeFilesystem;
|
||||
exports.streamFilesystem = streamFilesystem;
|
||||
exports.readArchiveHeaderSync = readArchiveHeaderSync;
|
||||
exports.readFilesystemSync = readFilesystemSync;
|
||||
exports.uncacheFilesystem = uncacheFilesystem;
|
||||
exports.uncacheAll = uncacheAll;
|
||||
exports.readFileSync = readFileSync;
|
||||
const path = __importStar(require("path"));
|
||||
const wrapped_fs_1 = __importDefault(require("./wrapped-fs"));
|
||||
const pickle_1 = require("./pickle");
|
||||
const filesystem_1 = require("./filesystem");
|
||||
const util_1 = require("util");
|
||||
const stream = __importStar(require("stream"));
|
||||
const pipeline = (0, util_1.promisify)(stream.pipeline);
|
||||
let filesystemCache = Object.create(null);
|
||||
async function copyFile(dest, src, filename) {
|
||||
const srcFile = path.join(src, filename);
|
||||
const targetFile = path.join(dest, filename);
|
||||
const [content, stats] = await Promise.all([
|
||||
wrapped_fs_1.default.readFile(srcFile),
|
||||
wrapped_fs_1.default.stat(srcFile),
|
||||
wrapped_fs_1.default.mkdirp(path.dirname(targetFile)),
|
||||
]);
|
||||
return wrapped_fs_1.default.writeFile(targetFile, content, { mode: stats.mode });
|
||||
}
|
||||
async function streamTransformedFile(stream, outStream) {
|
||||
return new Promise((resolve, reject) => {
|
||||
stream.pipe(outStream, { end: false });
|
||||
stream.on('error', reject);
|
||||
stream.on('end', () => resolve());
|
||||
});
|
||||
}
|
||||
const writeFileListToStream = async function (dest, filesystem, out, lists, metadata) {
|
||||
const { files, links } = lists;
|
||||
for (const file of files) {
|
||||
if (file.unpack) {
|
||||
// the file should not be packed into archive
|
||||
const filename = path.relative(filesystem.getRootPath(), file.filename);
|
||||
await copyFile(`${dest}.unpacked`, filesystem.getRootPath(), filename);
|
||||
}
|
||||
else {
|
||||
const transformed = metadata[file.filename].transformed;
|
||||
const stream = wrapped_fs_1.default.createReadStream(transformed ? transformed.path : file.filename);
|
||||
await streamTransformedFile(stream, out);
|
||||
}
|
||||
}
|
||||
for (const file of links.filter((f) => f.unpack)) {
|
||||
// the symlink needs to be recreated outside in .unpacked
|
||||
const filename = path.relative(filesystem.getRootPath(), file.filename);
|
||||
const link = await wrapped_fs_1.default.readlink(file.filename);
|
||||
await createSymlink(dest, filename, link);
|
||||
}
|
||||
return out.end();
|
||||
};
|
||||
async function writeFilesystem(dest, filesystem, lists, metadata) {
|
||||
const out = await createFilesystemWriteStream(filesystem, dest);
|
||||
return writeFileListToStream(dest, filesystem, out, lists, metadata);
|
||||
}
|
||||
async function streamFilesystem(dest, filesystem, lists) {
|
||||
var _a, e_1, _b, _c;
|
||||
const out = await createFilesystemWriteStream(filesystem, dest);
|
||||
const { files, links } = lists;
|
||||
try {
|
||||
for (var _d = true, files_1 = __asyncValues(files), files_1_1; files_1_1 = await files_1.next(), _a = files_1_1.done, !_a; _d = true) {
|
||||
_c = files_1_1.value;
|
||||
_d = false;
|
||||
const file = _c;
|
||||
// the file should not be packed into archive
|
||||
if (file.unpack) {
|
||||
const targetFile = path.join(`${dest}.unpacked`, file.filename);
|
||||
await wrapped_fs_1.default.mkdirp(path.dirname(targetFile));
|
||||
const writeStream = wrapped_fs_1.default.createWriteStream(targetFile, { mode: file.mode });
|
||||
await pipeline(file.streamGenerator(), writeStream);
|
||||
}
|
||||
else {
|
||||
await streamTransformedFile(file.streamGenerator(), out);
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (e_1_1) { e_1 = { error: e_1_1 }; }
|
||||
finally {
|
||||
try {
|
||||
if (!_d && !_a && (_b = files_1.return)) await _b.call(files_1);
|
||||
}
|
||||
finally { if (e_1) throw e_1.error; }
|
||||
}
|
||||
for (const file of links.filter((f) => f.unpack && f.link)) {
|
||||
// the symlink needs to be recreated outside in .unpacked
|
||||
await createSymlink(dest, file.filename, file.link);
|
||||
}
|
||||
return out.end();
|
||||
}
|
||||
function readArchiveHeaderSync(archivePath) {
|
||||
const fd = wrapped_fs_1.default.openSync(archivePath, 'r');
|
||||
let size;
|
||||
let headerBuf;
|
||||
try {
|
||||
const sizeBuf = Buffer.alloc(8);
|
||||
if (wrapped_fs_1.default.readSync(fd, sizeBuf, 0, 8, null) !== 8) {
|
||||
throw new Error('Unable to read header size');
|
||||
}
|
||||
const sizePickle = pickle_1.Pickle.createFromBuffer(sizeBuf);
|
||||
size = sizePickle.createIterator().readUInt32();
|
||||
headerBuf = Buffer.alloc(size);
|
||||
if (wrapped_fs_1.default.readSync(fd, headerBuf, 0, size, null) !== size) {
|
||||
throw new Error('Unable to read header');
|
||||
}
|
||||
}
|
||||
finally {
|
||||
wrapped_fs_1.default.closeSync(fd);
|
||||
}
|
||||
const headerPickle = pickle_1.Pickle.createFromBuffer(headerBuf);
|
||||
const header = headerPickle.createIterator().readString();
|
||||
return { headerString: header, header: JSON.parse(header), headerSize: size };
|
||||
}
|
||||
function readFilesystemSync(archivePath) {
|
||||
if (!filesystemCache[archivePath]) {
|
||||
const header = readArchiveHeaderSync(archivePath);
|
||||
const filesystem = new filesystem_1.Filesystem(archivePath);
|
||||
filesystem.setHeader(header.header, header.headerSize);
|
||||
filesystemCache[archivePath] = filesystem;
|
||||
}
|
||||
return filesystemCache[archivePath];
|
||||
}
|
||||
function uncacheFilesystem(archivePath) {
|
||||
if (filesystemCache[archivePath]) {
|
||||
filesystemCache[archivePath] = undefined;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function uncacheAll() {
|
||||
filesystemCache = {};
|
||||
}
|
||||
function readFileSync(filesystem, filename, info) {
|
||||
let buffer = Buffer.alloc(info.size);
|
||||
if (info.size <= 0) {
|
||||
return buffer;
|
||||
}
|
||||
if (info.unpacked) {
|
||||
// it's an unpacked file, copy it.
|
||||
buffer = wrapped_fs_1.default.readFileSync(path.join(`${filesystem.getRootPath()}.unpacked`, filename));
|
||||
}
|
||||
else {
|
||||
// Node throws an exception when reading 0 bytes into a 0-size buffer,
|
||||
// so we short-circuit the read in this case.
|
||||
const fd = wrapped_fs_1.default.openSync(filesystem.getRootPath(), 'r');
|
||||
try {
|
||||
const offset = 8 + filesystem.getHeaderSize() + parseInt(info.offset);
|
||||
wrapped_fs_1.default.readSync(fd, buffer, 0, info.size, offset);
|
||||
}
|
||||
finally {
|
||||
wrapped_fs_1.default.closeSync(fd);
|
||||
}
|
||||
}
|
||||
return buffer;
|
||||
}
|
||||
async function createFilesystemWriteStream(filesystem, dest) {
|
||||
const headerPickle = pickle_1.Pickle.createEmpty();
|
||||
headerPickle.writeString(JSON.stringify(filesystem.getHeader()));
|
||||
const headerBuf = headerPickle.toBuffer();
|
||||
const sizePickle = pickle_1.Pickle.createEmpty();
|
||||
sizePickle.writeUInt32(headerBuf.length);
|
||||
const sizeBuf = sizePickle.toBuffer();
|
||||
const out = wrapped_fs_1.default.createWriteStream(dest);
|
||||
await new Promise((resolve, reject) => {
|
||||
out.on('error', reject);
|
||||
out.write(sizeBuf);
|
||||
return out.write(headerBuf, () => resolve());
|
||||
});
|
||||
return out;
|
||||
}
|
||||
async function createSymlink(dest, filepath, link) {
|
||||
// if symlink is within subdirectories, then we need to recreate dir structure
|
||||
await wrapped_fs_1.default.mkdirp(path.join(`${dest}.unpacked`, path.dirname(filepath)));
|
||||
// create symlink within unpacked dir
|
||||
await wrapped_fs_1.default.symlink(link, path.join(`${dest}.unpacked`, filepath)).catch(async (error) => {
|
||||
if (error.code === 'EPERM' && error.syscall === 'symlink') {
|
||||
throw new Error('Could not create symlinks for unpacked assets. On Windows, consider activating Developer Mode to allow non-admin users to create symlinks by following the instructions at https://docs.microsoft.com/en-us/windows/apps/get-started/enable-your-device-for-development.');
|
||||
}
|
||||
throw error;
|
||||
});
|
||||
}
|
||||
//# sourceMappingURL=disk.js.map
|
||||
1
desktop-operator/node_modules/@electron/asar/lib/disk.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/asar/lib/disk.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
44
desktop-operator/node_modules/@electron/asar/lib/filesystem.d.ts
generated
vendored
Normal file
44
desktop-operator/node_modules/@electron/asar/lib/filesystem.d.ts
generated
vendored
Normal file
@@ -0,0 +1,44 @@
|
||||
import { FileIntegrity } from './integrity';
|
||||
import { CrawledFileType } from './crawlfs';
|
||||
export type EntryMetadata = {
|
||||
unpacked?: boolean;
|
||||
};
|
||||
export type FilesystemDirectoryEntry = {
|
||||
files: Record<string, FilesystemEntry>;
|
||||
} & EntryMetadata;
|
||||
export type FilesystemFileEntry = {
|
||||
unpacked: boolean;
|
||||
executable: boolean;
|
||||
offset: string;
|
||||
size: number;
|
||||
integrity: FileIntegrity;
|
||||
} & EntryMetadata;
|
||||
export type FilesystemLinkEntry = {
|
||||
link: string;
|
||||
} & EntryMetadata;
|
||||
export type FilesystemEntry = FilesystemDirectoryEntry | FilesystemFileEntry | FilesystemLinkEntry;
|
||||
export declare class Filesystem {
|
||||
private src;
|
||||
private header;
|
||||
private headerSize;
|
||||
private offset;
|
||||
constructor(src: string);
|
||||
getRootPath(): string;
|
||||
getHeader(): FilesystemEntry;
|
||||
getHeaderSize(): number;
|
||||
setHeader(header: FilesystemEntry, headerSize: number): void;
|
||||
searchNodeFromDirectory(p: string): FilesystemEntry;
|
||||
searchNodeFromPath(p: string): FilesystemEntry;
|
||||
insertDirectory(p: string, shouldUnpack: boolean): Record<string, FilesystemEntry>;
|
||||
insertFile(p: string, streamGenerator: () => NodeJS.ReadableStream, shouldUnpack: boolean, file: CrawledFileType, options?: {
|
||||
transform?: (filePath: string) => NodeJS.ReadWriteStream | void;
|
||||
}): Promise<void>;
|
||||
insertLink(p: string, shouldUnpack: boolean, parentPath?: string, symlink?: string, // /var/tmp => /private/var
|
||||
src?: string): string;
|
||||
private resolveLink;
|
||||
listFiles(options?: {
|
||||
isPack: boolean;
|
||||
}): string[];
|
||||
getNode(p: string, followLinks?: boolean): FilesystemEntry;
|
||||
getFile(p: string, followLinks?: boolean): FilesystemEntry;
|
||||
}
|
||||
199
desktop-operator/node_modules/@electron/asar/lib/filesystem.js
generated
vendored
Normal file
199
desktop-operator/node_modules/@electron/asar/lib/filesystem.js
generated
vendored
Normal file
@@ -0,0 +1,199 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Filesystem = void 0;
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
const util_1 = require("util");
|
||||
const stream = __importStar(require("stream"));
|
||||
const integrity_1 = require("./integrity");
|
||||
const wrapped_fs_1 = __importDefault(require("./wrapped-fs"));
|
||||
const UINT32_MAX = 2 ** 32 - 1;
|
||||
const pipeline = (0, util_1.promisify)(stream.pipeline);
|
||||
class Filesystem {
|
||||
constructor(src) {
|
||||
this.src = path.resolve(src);
|
||||
this.header = { files: Object.create(null) };
|
||||
this.headerSize = 0;
|
||||
        this.offset = BigInt(0);
    }
    getRootPath() {
        return this.src;
    }
    getHeader() {
        return this.header;
    }
    getHeaderSize() {
        return this.headerSize;
    }
    setHeader(header, headerSize) {
        this.header = header;
        this.headerSize = headerSize;
    }
    searchNodeFromDirectory(p) {
        let json = this.header;
        const dirs = p.split(path.sep);
        for (const dir of dirs) {
            if (dir !== '.') {
                if ('files' in json) {
                    if (!json.files[dir]) {
                        json.files[dir] = { files: Object.create(null) };
                    }
                    json = json.files[dir];
                }
                else {
                    throw new Error('Unexpected directory state while traversing: ' + p);
                }
            }
        }
        return json;
    }
    searchNodeFromPath(p) {
        p = path.relative(this.src, p);
        if (!p) {
            return this.header;
        }
        const name = path.basename(p);
        const node = this.searchNodeFromDirectory(path.dirname(p));
        if (!node.files) {
            node.files = Object.create(null);
        }
        if (!node.files[name]) {
            node.files[name] = Object.create(null);
        }
        return node.files[name];
    }
    insertDirectory(p, shouldUnpack) {
        const node = this.searchNodeFromPath(p);
        if (shouldUnpack) {
            node.unpacked = shouldUnpack;
        }
        node.files = node.files || Object.create(null);
        return node.files;
    }
    async insertFile(p, streamGenerator, shouldUnpack, file, options = {}) {
        const dirNode = this.searchNodeFromPath(path.dirname(p));
        const node = this.searchNodeFromPath(p);
        if (shouldUnpack || dirNode.unpacked) {
            node.size = file.stat.size;
            node.unpacked = true;
            node.integrity = await (0, integrity_1.getFileIntegrity)(streamGenerator());
            return Promise.resolve();
        }
        let size;
        const transformed = options.transform && options.transform(p);
        if (transformed) {
            const tmpdir = await wrapped_fs_1.default.mkdtemp(path.join(os.tmpdir(), 'asar-'));
            const tmpfile = path.join(tmpdir, path.basename(p));
            const out = wrapped_fs_1.default.createWriteStream(tmpfile);
            await pipeline(streamGenerator(), transformed, out);
            file.transformed = {
                path: tmpfile,
                stat: await wrapped_fs_1.default.lstat(tmpfile),
            };
            size = file.transformed.stat.size;
        }
        else {
            size = file.stat.size;
        }
        // JavaScript cannot precisely present integers >= UINT32_MAX.
        if (size > UINT32_MAX) {
            throw new Error(`${p}: file size can not be larger than 4.2GB`);
        }
        node.size = size;
        node.offset = this.offset.toString();
        node.integrity = await (0, integrity_1.getFileIntegrity)(streamGenerator());
        if (process.platform !== 'win32' && file.stat.mode & 0o100) {
            node.executable = true;
        }
        this.offset += BigInt(size);
    }
    insertLink(p, shouldUnpack, parentPath = wrapped_fs_1.default.realpathSync(path.dirname(p)), symlink = wrapped_fs_1.default.readlinkSync(p), // /var/tmp => /private/var
    src = wrapped_fs_1.default.realpathSync(this.src)) {
        const link = this.resolveLink(src, parentPath, symlink);
        if (link.startsWith('..')) {
            throw new Error(`${p}: file "${link}" links out of the package`);
        }
        const node = this.searchNodeFromPath(p);
        const dirNode = this.searchNodeFromPath(path.dirname(p));
        if (shouldUnpack || dirNode.unpacked) {
            node.unpacked = true;
        }
        node.link = link;
        return link;
    }
    resolveLink(src, parentPath, symlink) {
        const target = path.join(parentPath, symlink);
        const link = path.relative(src, target);
        return link;
    }
    listFiles(options) {
        const files = [];
        const fillFilesFromMetadata = function (basePath, metadata) {
            if (!('files' in metadata)) {
                return;
            }
            for (const [childPath, childMetadata] of Object.entries(metadata.files)) {
                const fullPath = path.join(basePath, childPath);
                const packState = 'unpacked' in childMetadata && childMetadata.unpacked ? 'unpack' : 'pack ';
                files.push(options && options.isPack ? `${packState} : ${fullPath}` : fullPath);
                fillFilesFromMetadata(fullPath, childMetadata);
            }
        };
        fillFilesFromMetadata('/', this.header);
        return files;
    }
    getNode(p, followLinks = true) {
        const node = this.searchNodeFromDirectory(path.dirname(p));
        const name = path.basename(p);
        if ('link' in node && followLinks) {
            return this.getNode(path.join(node.link, name));
        }
        if (name) {
            return node.files[name];
        }
        else {
            return node;
        }
    }
    getFile(p, followLinks = true) {
        const info = this.getNode(p, followLinks);
        if (!info) {
            throw new Error(`"${p}" was not found in this archive`);
        }
        // if followLinks is false we don't resolve symlinks
        if ('link' in info && followLinks) {
            return this.getFile(info.link, followLinks);
        }
        else {
            return info;
        }
    }
}
exports.Filesystem = Filesystem;
//# sourceMappingURL=filesystem.js.map
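For orientation, the `Filesystem` methods above incrementally build the JSON header that is later written in front of the archive. A minimal sketch of the resulting shape, with field names taken from the code above and purely hypothetical values:

```js
// Hypothetical header after insertDirectory('lib'), insertFile('lib/app.js', ...)
// and insertLink(...) calls. Offsets are strings because they come from
// BigInt values serialized via toString().
const header = {
  files: {
    lib: {
      files: {
        'app.js': {
          size: 1024,
          offset: '0',
          executable: true, // only set when the mode has the owner-execute bit
          integrity: { algorithm: 'SHA256', hash: '<sha256-hex>', blockSize: 4194304, blocks: ['<sha256-hex>'] },
        },
        'app-link.js': { link: 'lib/app.js' }, // symlinks store a package-relative target
      },
    },
  },
};
```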
1
desktop-operator/node_modules/@electron/asar/lib/filesystem.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
7
desktop-operator/node_modules/@electron/asar/lib/integrity.d.ts
generated
vendored
Normal file
@@ -0,0 +1,7 @@
export type FileIntegrity = {
    algorithm: 'SHA256';
    hash: string;
    blockSize: number;
    blocks: string[];
};
export declare function getFileIntegrity(inputFileStream: NodeJS.ReadableStream): Promise<FileIntegrity>;
75
desktop-operator/node_modules/@electron/asar/lib/integrity.js
generated
vendored
Normal file
@@ -0,0 +1,75 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getFileIntegrity = getFileIntegrity;
const crypto = __importStar(require("crypto"));
const stream = __importStar(require("stream"));
const util_1 = require("util");
const ALGORITHM = 'SHA256';
// 4MB default block size
const BLOCK_SIZE = 4 * 1024 * 1024;
const pipeline = (0, util_1.promisify)(stream.pipeline);
function hashBlock(block) {
    return crypto.createHash(ALGORITHM).update(block).digest('hex');
}
async function getFileIntegrity(inputFileStream) {
    const fileHash = crypto.createHash(ALGORITHM);
    const blockHashes = [];
    let currentBlockSize = 0;
    let currentBlock = [];
    await pipeline(inputFileStream, new stream.PassThrough({
        decodeStrings: false,
        transform(_chunk, encoding, callback) {
            fileHash.update(_chunk);
            function handleChunk(chunk) {
                const diffToSlice = Math.min(BLOCK_SIZE - currentBlockSize, chunk.byteLength);
                currentBlockSize += diffToSlice;
                currentBlock.push(chunk.slice(0, diffToSlice));
                if (currentBlockSize === BLOCK_SIZE) {
                    blockHashes.push(hashBlock(Buffer.concat(currentBlock)));
                    currentBlock = [];
                    currentBlockSize = 0;
                }
                if (diffToSlice < chunk.byteLength) {
                    handleChunk(chunk.slice(diffToSlice));
                }
            }
            handleChunk(_chunk);
            callback();
        },
        flush(callback) {
            blockHashes.push(hashBlock(Buffer.concat(currentBlock)));
            currentBlock = [];
            callback();
        },
    }));
    return {
        algorithm: ALGORITHM,
        hash: fileHash.digest('hex'),
        blockSize: BLOCK_SIZE,
        blocks: blockHashes,
    };
}
//# sourceMappingURL=integrity.js.map
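The module above hashes each file twice: once as a whole and once in 4 MB blocks, so a consumer can check individual ranges without re-reading the entire file. A minimal verification sketch against a `FileIntegrity` record; this is not part of the package's public API, and it assumes the file is small enough to read into memory:

```js
const crypto = require('crypto');
const fs = require('fs');

// Recompute the whole-file hash and each block hash and compare them to a
// stored FileIntegrity record ({ algorithm, hash, blockSize, blocks }).
function verifyIntegrity(filePath, integrity) {
  const data = fs.readFileSync(filePath);
  const wholeHash = crypto.createHash(integrity.algorithm).update(data).digest('hex');
  if (wholeHash !== integrity.hash) {
    return false;
  }
  // slice() past the end of the buffer yields an empty buffer, which matches
  // the trailing empty block the producer emits in its flush() handler.
  return integrity.blocks.every((expected, i) => {
    const block = data.slice(i * integrity.blockSize, (i + 1) * integrity.blockSize);
    return crypto.createHash(integrity.algorithm).update(block).digest('hex') === expected;
  });
}
```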
1
desktop-operator/node_modules/@electron/asar/lib/integrity.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"integrity.js","sourceRoot":"","sources":["../src/integrity.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAsBA,4CA8CC;AApED,+CAAiC;AAEjC,+CAAiC;AACjC,+BAAiC;AAEjC,MAAM,SAAS,GAAG,QAAQ,CAAC;AAC3B,yBAAyB;AACzB,MAAM,UAAU,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;AAEnC,MAAM,QAAQ,GAAG,IAAA,gBAAS,EAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;AAE5C,SAAS,SAAS,CAAC,KAAa;IAC9B,OAAO,MAAM,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;AAClE,CAAC;AASM,KAAK,UAAU,gBAAgB,CACpC,eAAsC;IAEtC,MAAM,QAAQ,GAAG,MAAM,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC;IAE9C,MAAM,WAAW,GAAa,EAAE,CAAC;IACjC,IAAI,gBAAgB,GAAG,CAAC,CAAC;IACzB,IAAI,YAAY,GAAa,EAAE,CAAC;IAEhC,MAAM,QAAQ,CACZ,eAAe,EACf,IAAI,MAAM,CAAC,WAAW,CAAC;QACrB,aAAa,EAAE,KAAK;QACpB,SAAS,CAAC,MAAc,EAAE,QAAQ,EAAE,QAAQ;YAC1C,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YAExB,SAAS,WAAW,CAAC,KAAa;gBAChC,MAAM,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,GAAG,gBAAgB,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;gBAC9E,gBAAgB,IAAI,WAAW,CAAC;gBAChC,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,CAAC;gBAC/C,IAAI,gBAAgB,KAAK,UAAU,EAAE,CAAC;oBACpC,WAAW,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;oBACzD,YAAY,GAAG,EAAE,CAAC;oBAClB,gBAAgB,GAAG,CAAC,CAAC;gBACvB,CAAC;gBACD,IAAI,WAAW,GAAG,KAAK,CAAC,UAAU,EAAE,CAAC;oBACnC,WAAW,CAAC,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC;gBACxC,CAAC;YACH,CAAC;YACD,WAAW,CAAC,MAAM,CAAC,CAAC;YACpB,QAAQ,EAAE,CAAC;QACb,CAAC;QACD,KAAK,CAAC,QAAQ;YACZ,WAAW,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;YACzD,YAAY,GAAG,EAAE,CAAC;YAClB,QAAQ,EAAE,CAAC;QACb,CAAC;KACF,CAAC,CACH,CAAC;IAEF,OAAO;QACL,SAAS,EAAE,SAAS;QACpB,IAAI,EAAE,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC;QAC5B,SAAS,EAAE,UAAU;QACrB,MAAM,EAAE,WAAW;KACpB,CAAC;AACJ,CAAC"}
46
desktop-operator/node_modules/@electron/asar/lib/pickle.d.ts
generated
vendored
Normal file
@@ -0,0 +1,46 @@
declare class PickleIterator {
    private payload;
    private payloadOffset;
    private readIndex;
    private endIndex;
    constructor(pickle: Pickle);
    readBool(): boolean;
    readInt(): number;
    readUInt32(): number;
    readInt64(): bigint;
    readUInt64(): bigint;
    readFloat(): number;
    readDouble(): number;
    readString(): string;
    readBytes(length: number): Buffer;
    readBytes<R, F extends (...args: any[]) => R>(length: number, method: F): R;
    getReadPayloadOffsetAndAdvance(length: number): number;
    advance(size: number): void;
}
export declare class Pickle {
    private header;
    private headerSize;
    private capacityAfterHeader;
    private writeOffset;
    private constructor();
    static createEmpty(): Pickle;
    static createFromBuffer(buffer: Buffer): Pickle;
    getHeader(): Buffer;
    getHeaderSize(): number;
    createIterator(): PickleIterator;
    toBuffer(): Buffer;
    writeBool(value: boolean): boolean;
    writeInt(value: number): boolean;
    writeUInt32(value: number): boolean;
    writeInt64(value: number): boolean;
    writeUInt64(value: number): boolean;
    writeFloat(value: number): boolean;
    writeDouble(value: number): boolean;
    writeString(value: string): boolean;
    setPayloadSize(payloadSize: number): number;
    getPayloadSize(): number;
    writeBytes(data: string, length: number, method?: undefined): boolean;
    writeBytes(data: number | BigInt, length: number, method: Function): boolean;
    resize(newCapacity: number): void;
}
export {};
199
desktop-operator/node_modules/@electron/asar/lib/pickle.js
generated
vendored
Normal file
@@ -0,0 +1,199 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.Pickle = void 0;
|
||||
// sizeof(T).
|
||||
const SIZE_INT32 = 4;
|
||||
const SIZE_UINT32 = 4;
|
||||
const SIZE_INT64 = 8;
|
||||
const SIZE_UINT64 = 8;
|
||||
const SIZE_FLOAT = 4;
|
||||
const SIZE_DOUBLE = 8;
|
||||
// The allocation granularity of the payload.
|
||||
const PAYLOAD_UNIT = 64;
|
||||
// Largest JS number.
|
||||
const CAPACITY_READ_ONLY = 9007199254740992;
|
||||
// Aligns 'i' by rounding it up to the next multiple of 'alignment'.
|
||||
const alignInt = function (i, alignment) {
|
||||
return i + ((alignment - (i % alignment)) % alignment);
|
||||
};
|
||||
// PickleIterator reads data from a Pickle. The Pickle object must remain valid
|
||||
// while the PickleIterator object is in use.
|
||||
class PickleIterator {
|
||||
constructor(pickle) {
|
||||
this.payload = pickle.getHeader();
|
||||
this.payloadOffset = pickle.getHeaderSize();
|
||||
this.readIndex = 0;
|
||||
this.endIndex = pickle.getPayloadSize();
|
||||
}
|
||||
readBool() {
|
||||
return this.readInt() !== 0;
|
||||
}
|
||||
readInt() {
|
||||
return this.readBytes(SIZE_INT32, Buffer.prototype.readInt32LE);
|
||||
}
|
||||
readUInt32() {
|
||||
return this.readBytes(SIZE_UINT32, Buffer.prototype.readUInt32LE);
|
||||
}
|
||||
readInt64() {
|
||||
return this.readBytes(SIZE_INT64, Buffer.prototype.readBigInt64LE);
|
||||
}
|
||||
readUInt64() {
|
||||
return this.readBytes(SIZE_UINT64, Buffer.prototype.readBigUInt64LE);
|
||||
}
|
||||
readFloat() {
|
||||
return this.readBytes(SIZE_FLOAT, Buffer.prototype.readFloatLE);
|
||||
}
|
||||
readDouble() {
|
||||
return this.readBytes(SIZE_DOUBLE, Buffer.prototype.readDoubleLE);
|
||||
}
|
||||
readString() {
|
||||
return this.readBytes(this.readInt()).toString();
|
||||
}
|
||||
readBytes(length, method) {
|
||||
const readPayloadOffset = this.getReadPayloadOffsetAndAdvance(length);
|
||||
if (method != null) {
|
||||
return method.call(this.payload, readPayloadOffset, length);
|
||||
}
|
||||
else {
|
||||
return this.payload.slice(readPayloadOffset, readPayloadOffset + length);
|
||||
}
|
||||
}
|
||||
getReadPayloadOffsetAndAdvance(length) {
|
||||
if (length > this.endIndex - this.readIndex) {
|
||||
this.readIndex = this.endIndex;
|
||||
throw new Error('Failed to read data with length of ' + length);
|
||||
}
|
||||
const readPayloadOffset = this.payloadOffset + this.readIndex;
|
||||
this.advance(length);
|
||||
return readPayloadOffset;
|
||||
}
|
||||
advance(size) {
|
||||
const alignedSize = alignInt(size, SIZE_UINT32);
|
||||
if (this.endIndex - this.readIndex < alignedSize) {
|
||||
this.readIndex = this.endIndex;
|
||||
}
|
||||
else {
|
||||
this.readIndex += alignedSize;
|
||||
}
|
||||
}
|
||||
}
|
||||
// This class provides facilities for basic binary value packing and unpacking.
|
||||
//
|
||||
// The Pickle class supports appending primitive values (ints, strings, etc.)
|
||||
// to a pickle instance. The Pickle instance grows its internal memory buffer
|
||||
// dynamically to hold the sequence of primitive values. The internal memory
|
||||
// buffer is exposed as the "data" of the Pickle. This "data" can be passed
|
||||
// to a Pickle object to initialize it for reading.
|
||||
//
|
||||
// When reading from a Pickle object, it is important for the consumer to know
|
||||
// what value types to read and in what order to read them as the Pickle does
|
||||
// not keep track of the type of data written to it.
|
||||
//
|
||||
// The Pickle's data has a header which contains the size of the Pickle's
|
||||
// payload. It can optionally support additional space in the header. That
|
||||
// space is controlled by the header_size parameter passed to the Pickle
|
||||
// constructor.
|
||||
class Pickle {
|
||||
constructor(buffer) {
|
||||
if (buffer) {
|
||||
this.header = buffer;
|
||||
this.headerSize = buffer.length - this.getPayloadSize();
|
||||
this.capacityAfterHeader = CAPACITY_READ_ONLY;
|
||||
this.writeOffset = 0;
|
||||
if (this.headerSize > buffer.length) {
|
||||
this.headerSize = 0;
|
||||
}
|
||||
if (this.headerSize !== alignInt(this.headerSize, SIZE_UINT32)) {
|
||||
this.headerSize = 0;
|
||||
}
|
||||
if (this.headerSize === 0) {
|
||||
this.header = Buffer.alloc(0);
|
||||
}
|
||||
}
|
||||
else {
|
||||
this.header = Buffer.alloc(0);
|
||||
this.headerSize = SIZE_UINT32;
|
||||
this.capacityAfterHeader = 0;
|
||||
this.writeOffset = 0;
|
||||
this.resize(PAYLOAD_UNIT);
|
||||
this.setPayloadSize(0);
|
||||
}
|
||||
}
|
||||
static createEmpty() {
|
||||
return new Pickle();
|
||||
}
|
||||
static createFromBuffer(buffer) {
|
||||
return new Pickle(buffer);
|
||||
}
|
||||
getHeader() {
|
||||
return this.header;
|
||||
}
|
||||
getHeaderSize() {
|
||||
return this.headerSize;
|
||||
}
|
||||
createIterator() {
|
||||
return new PickleIterator(this);
|
||||
}
|
||||
toBuffer() {
|
||||
return this.header.slice(0, this.headerSize + this.getPayloadSize());
|
||||
}
|
||||
writeBool(value) {
|
||||
return this.writeInt(value ? 1 : 0);
|
||||
}
|
||||
writeInt(value) {
|
||||
return this.writeBytes(value, SIZE_INT32, Buffer.prototype.writeInt32LE);
|
||||
}
|
||||
writeUInt32(value) {
|
||||
return this.writeBytes(value, SIZE_UINT32, Buffer.prototype.writeUInt32LE);
|
||||
}
|
||||
writeInt64(value) {
|
||||
return this.writeBytes(BigInt(value), SIZE_INT64, Buffer.prototype.writeBigInt64LE);
|
||||
}
|
||||
writeUInt64(value) {
|
||||
return this.writeBytes(BigInt(value), SIZE_UINT64, Buffer.prototype.writeBigUInt64LE);
|
||||
}
|
||||
writeFloat(value) {
|
||||
return this.writeBytes(value, SIZE_FLOAT, Buffer.prototype.writeFloatLE);
|
||||
}
|
||||
writeDouble(value) {
|
||||
return this.writeBytes(value, SIZE_DOUBLE, Buffer.prototype.writeDoubleLE);
|
||||
}
|
||||
writeString(value) {
|
||||
const length = Buffer.byteLength(value, 'utf8');
|
||||
if (!this.writeInt(length)) {
|
||||
return false;
|
||||
}
|
||||
return this.writeBytes(value, length);
|
||||
}
|
||||
setPayloadSize(payloadSize) {
|
||||
return this.header.writeUInt32LE(payloadSize, 0);
|
||||
}
|
||||
getPayloadSize() {
|
||||
return this.header.readUInt32LE(0);
|
||||
}
|
||||
writeBytes(data, length, method) {
|
||||
const dataLength = alignInt(length, SIZE_UINT32);
|
||||
const newSize = this.writeOffset + dataLength;
|
||||
if (newSize > this.capacityAfterHeader) {
|
||||
this.resize(Math.max(this.capacityAfterHeader * 2, newSize));
|
||||
}
|
||||
if (method) {
|
||||
method.call(this.header, data, this.headerSize + this.writeOffset);
|
||||
}
|
||||
else {
|
||||
this.header.write(data, this.headerSize + this.writeOffset, length);
|
||||
}
|
||||
const endOffset = this.headerSize + this.writeOffset + length;
|
||||
this.header.fill(0, endOffset, endOffset + dataLength - length);
|
||||
this.setPayloadSize(newSize);
|
||||
this.writeOffset = newSize;
|
||||
return true;
|
||||
}
|
||||
resize(newCapacity) {
|
||||
newCapacity = alignInt(newCapacity, PAYLOAD_UNIT);
|
||||
this.header = Buffer.concat([this.header, Buffer.alloc(newCapacity)]);
|
||||
this.capacityAfterHeader = newCapacity;
|
||||
}
|
||||
}
|
||||
exports.Pickle = Pickle;
|
||||
//# sourceMappingURL=pickle.js.map
|
||||
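As the comment block in pickle.js explains, a `Pickle` is a length-prefixed buffer of primitive values, and asar uses it to frame the JSON header at the front of the archive. A minimal round-trip sketch; note that it requires the internal `lib/pickle` module directly, which is not a documented entry point:

```js
const { Pickle } = require('@electron/asar/lib/pickle');

// Write a string into a pickle, then read it back through an iterator.
const out = Pickle.createEmpty();
out.writeString(JSON.stringify({ files: {} }));
const buf = out.toBuffer(); // 4-byte payload size + aligned payload

const parsed = Pickle.createFromBuffer(buf);
const header = JSON.parse(parsed.createIterator().readString());
console.log(header); // { files: {} }
```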
1
desktop-operator/node_modules/@electron/asar/lib/pickle.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
157
desktop-operator/node_modules/@electron/asar/lib/types/glob.d.ts
generated
vendored
Normal file
@@ -0,0 +1,157 @@
|
||||
/**
|
||||
* TODO(erikian): remove this file once we upgrade to the latest `glob` version.
|
||||
* https://github.com/electron/asar/pull/332#issuecomment-2435407933
|
||||
*/
|
||||
interface IMinimatchOptions {
|
||||
/**
|
||||
* Dump a ton of stuff to stderr.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
debug?: boolean | undefined;
|
||||
/**
|
||||
* Do not expand `{a,b}` and `{1..3}` brace sets.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
nobrace?: boolean | undefined;
|
||||
/**
|
||||
* Disable `**` matching against multiple folder names.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
noglobstar?: boolean | undefined;
|
||||
/**
|
||||
* Allow patterns to match filenames starting with a period,
|
||||
* even if the pattern does not explicitly have a period in that spot.
|
||||
*
|
||||
* Note that by default, `'a/**' + '/b'` will **not** match `a/.d/b`, unless `dot` is set.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
dot?: boolean | undefined;
|
||||
/**
|
||||
* Disable "extglob" style patterns like `+(a|b)`.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
noext?: boolean | undefined;
|
||||
/**
|
||||
* Perform a case-insensitive match.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
nocase?: boolean | undefined;
|
||||
/**
|
||||
* When a match is not found by `minimatch.match`,
|
||||
* return a list containing the pattern itself if this option is set.
|
||||
* Otherwise, an empty list is returned if there are no matches.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
nonull?: boolean | undefined;
|
||||
/**
|
||||
* If set, then patterns without slashes will be matched
|
||||
* against the basename of the path if it contains slashes. For example,
|
||||
* `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
matchBase?: boolean | undefined;
|
||||
/**
|
||||
* Suppress the behavior of treating `#` at the start of a pattern as a comment.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
nocomment?: boolean | undefined;
|
||||
/**
|
||||
* Suppress the behavior of treating a leading `!` character as negation.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
nonegate?: boolean | undefined;
|
||||
/**
|
||||
* Returns from negate expressions the same as if they were not negated.
|
||||
* (Ie, true on a hit, false on a miss.)
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
flipNegate?: boolean | undefined;
|
||||
/**
|
||||
* Compare a partial path to a pattern. As long as the parts of the path that
|
||||
* are present are not contradicted by the pattern, it will be treated as a
|
||||
* match. This is useful in applications where you're walking through a
|
||||
* folder structure, and don't yet have the full path, but want to ensure that
|
||||
* you do not walk down paths that can never be a match.
|
||||
*
|
||||
* @default false
|
||||
*
|
||||
* @example
|
||||
* import minimatch = require("minimatch");
|
||||
*
|
||||
* minimatch('/a/b', '/a/*' + '/c/d', { partial: true }) // true, might be /a/b/c/d
|
||||
* minimatch('/a/b', '/**' + '/d', { partial: true }) // true, might be /a/b/.../d
|
||||
* minimatch('/x/y/z', '/a/**' + '/z', { partial: true }) // false, because x !== a
|
||||
*/
|
||||
partial?: boolean;
|
||||
/**
|
||||
* Use `\\` as a path separator _only_, and _never_ as an escape
|
||||
* character. If set, all `\\` characters are replaced with `/` in
|
||||
* the pattern. Note that this makes it **impossible** to match
|
||||
* against paths containing literal glob pattern characters, but
|
||||
* allows matching with patterns constructed using `path.join()` and
|
||||
* `path.resolve()` on Windows platforms, mimicking the (buggy!)
|
||||
* behavior of earlier versions on Windows. Please use with
|
||||
* caution, and be mindful of the caveat about Windows paths
|
||||
*
|
||||
* For legacy reasons, this is also set if
|
||||
* `options.allowWindowsEscape` is set to the exact value `false`.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
windowsPathsNoEscape?: boolean;
|
||||
}
|
||||
export interface IOptions extends IMinimatchOptions {
|
||||
cwd?: string | undefined;
|
||||
root?: string | undefined;
|
||||
dot?: boolean | undefined;
|
||||
nomount?: boolean | undefined;
|
||||
mark?: boolean | undefined;
|
||||
nosort?: boolean | undefined;
|
||||
stat?: boolean | undefined;
|
||||
silent?: boolean | undefined;
|
||||
strict?: boolean | undefined;
|
||||
cache?: {
|
||||
[path: string]: boolean | 'DIR' | 'FILE' | ReadonlyArray<string>;
|
||||
} | undefined;
|
||||
statCache?: {
|
||||
[path: string]: false | {
|
||||
isDirectory(): boolean;
|
||||
} | undefined;
|
||||
} | undefined;
|
||||
symlinks?: {
|
||||
[path: string]: boolean | undefined;
|
||||
} | undefined;
|
||||
realpathCache?: {
|
||||
[path: string]: string;
|
||||
} | undefined;
|
||||
sync?: boolean | undefined;
|
||||
nounique?: boolean | undefined;
|
||||
nonull?: boolean | undefined;
|
||||
debug?: boolean | undefined;
|
||||
nobrace?: boolean | undefined;
|
||||
noglobstar?: boolean | undefined;
|
||||
noext?: boolean | undefined;
|
||||
nocase?: boolean | undefined;
|
||||
matchBase?: any;
|
||||
nodir?: boolean | undefined;
|
||||
ignore?: string | ReadonlyArray<string> | undefined;
|
||||
follow?: boolean | undefined;
|
||||
realpath?: boolean | undefined;
|
||||
nonegate?: boolean | undefined;
|
||||
nocomment?: boolean | undefined;
|
||||
absolute?: boolean | undefined;
|
||||
allowWindowsEscape?: boolean | undefined;
|
||||
fs?: typeof import('fs');
|
||||
}
|
||||
export {};
|
||||
3
desktop-operator/node_modules/@electron/asar/lib/types/glob.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=glob.js.map
1
desktop-operator/node_modules/@electron/asar/lib/types/glob.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"glob.js","sourceRoot":"","sources":["../../src/types/glob.ts"],"names":[],"mappings":""}
13
desktop-operator/node_modules/@electron/asar/lib/wrapped-fs.d.ts
generated
vendored
Normal file
@@ -0,0 +1,13 @@
type AsarFS = typeof import('fs') & {
    mkdirp(dir: string): Promise<void>;
    mkdirpSync(dir: string): void;
    lstat: (typeof import('fs'))['promises']['lstat'];
    mkdtemp: (typeof import('fs'))['promises']['mkdtemp'];
    readFile: (typeof import('fs'))['promises']['readFile'];
    stat: (typeof import('fs'))['promises']['stat'];
    writeFile: (typeof import('fs'))['promises']['writeFile'];
    symlink: (typeof import('fs'))['promises']['symlink'];
    readlink: (typeof import('fs'))['promises']['readlink'];
};
declare const promisified: AsarFS;
export default promisified;
26
desktop-operator/node_modules/@electron/asar/lib/wrapped-fs.js
generated
vendored
Normal file
@@ -0,0 +1,26 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const fs = 'electron' in process.versions ? require('original-fs') : require('fs');
const promisifiedMethods = [
    'lstat',
    'mkdtemp',
    'readFile',
    'stat',
    'writeFile',
    'symlink',
    'readlink',
];
const promisified = {};
for (const method of Object.keys(fs)) {
    if (promisifiedMethods.includes(method)) {
        promisified[method] = fs.promises[method];
    }
    else {
        promisified[method] = fs[method];
    }
}
// To make it more like fs-extra
promisified.mkdirp = (dir) => fs.promises.mkdir(dir, { recursive: true });
promisified.mkdirpSync = (dir) => fs.mkdirSync(dir, { recursive: true });
exports.default = promisified;
//# sourceMappingURL=wrapped-fs.js.map
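The wrapper above swaps in `original-fs` when running inside Electron (so reads bypass asar interception), exposes a small set of methods as their `fs.promises` variants, and adds fs-extra-style `mkdirp` helpers. A short usage sketch, again requiring an internal path purely for illustration:

```js
const fs = require('@electron/asar/lib/wrapped-fs').default;

async function main() {
  await fs.mkdirp('/tmp/asar-demo/nested');   // fs.promises.mkdir(..., { recursive: true })
  await fs.writeFile('/tmp/asar-demo/nested/a.txt', 'hello');
  const stat = await fs.stat('/tmp/asar-demo/nested/a.txt');
  console.log(stat.size); // 5
}

main();
```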
1
desktop-operator/node_modules/@electron/asar/lib/wrapped-fs.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"wrapped-fs.js","sourceRoot":"","sources":["../src/wrapped-fs.ts"],"names":[],"mappings":";;AAAA,MAAM,EAAE,GAAG,UAAU,IAAI,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AAEnF,MAAM,kBAAkB,GAAG;IACzB,OAAO;IACP,SAAS;IACT,UAAU;IACV,MAAM;IACN,WAAW;IACX,SAAS;IACT,UAAU;CACX,CAAC;AAcF,MAAM,WAAW,GAAG,EAAY,CAAC;AAEjC,KAAK,MAAM,MAAM,IAAI,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC;IACrC,IAAI,kBAAkB,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC;QACvC,WAAmB,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC;SAAM,CAAC;QACL,WAAmB,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;IAC5C,CAAC;AACH,CAAC;AACD,gCAAgC;AAChC,WAAW,CAAC,MAAM,GAAG,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;AAC1E,WAAW,CAAC,UAAU,GAAG,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,CAAC,SAAS,CAAC,GAAG,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;AAEzE,kBAAe,WAAW,CAAC"}
15
desktop-operator/node_modules/@electron/asar/node_modules/minimatch/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,15 @@
The ISC License

Copyright (c) Isaac Z. Schlueter and Contributors

Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.

THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
230
desktop-operator/node_modules/@electron/asar/node_modules/minimatch/README.md
generated
vendored
Normal file
@@ -0,0 +1,230 @@
|
||||
# minimatch
|
||||
|
||||
A minimal matching utility.
|
||||
|
||||
[](http://travis-ci.org/isaacs/minimatch)
|
||||
|
||||
|
||||
This is the matching library used internally by npm.
|
||||
|
||||
It works by converting glob expressions into JavaScript `RegExp`
|
||||
objects.
|
||||
|
||||
## Usage
|
||||
|
||||
```javascript
|
||||
var minimatch = require("minimatch")
|
||||
|
||||
minimatch("bar.foo", "*.foo") // true!
|
||||
minimatch("bar.foo", "*.bar") // false!
|
||||
minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy!
|
||||
```
|
||||
|
||||
## Features
|
||||
|
||||
Supports these glob features:
|
||||
|
||||
* Brace Expansion
|
||||
* Extended glob matching
|
||||
* "Globstar" `**` matching
|
||||
|
||||
See:
|
||||
|
||||
* `man sh`
|
||||
* `man bash`
|
||||
* `man 3 fnmatch`
|
||||
* `man 5 gitignore`
|
||||
|
||||
## Minimatch Class
|
||||
|
||||
Create a minimatch object by instantiating the `minimatch.Minimatch` class.
|
||||
|
||||
```javascript
|
||||
var Minimatch = require("minimatch").Minimatch
|
||||
var mm = new Minimatch(pattern, options)
|
||||
```
|
||||
|
||||
### Properties
|
||||
|
||||
* `pattern` The original pattern the minimatch object represents.
|
||||
* `options` The options supplied to the constructor.
|
||||
* `set` A 2-dimensional array of regexp or string expressions.
|
||||
Each row in the
|
||||
array corresponds to a brace-expanded pattern. Each item in the row
|
||||
corresponds to a single path-part. For example, the pattern
|
||||
`{a,b/c}/d` would expand to a set of patterns like:
|
||||
|
||||
[ [ a, d ]
|
||||
, [ b, c, d ] ]
|
||||
|
||||
If a portion of the pattern doesn't have any "magic" in it
|
||||
(that is, it's something like `"foo"` rather than `fo*o?`), then it
|
||||
will be left as a string rather than converted to a regular
|
||||
expression.
|
||||
|
||||
* `regexp` Created by the `makeRe` method. A single regular expression
|
||||
expressing the entire pattern. This is useful in cases where you wish
|
||||
to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled.
|
||||
* `negate` True if the pattern is negated.
|
||||
* `comment` True if the pattern is a comment.
|
||||
* `empty` True if the pattern is `""`.
|
||||
|
||||
### Methods
|
||||
|
||||
* `makeRe` Generate the `regexp` member if necessary, and return it.
|
||||
Will return `false` if the pattern is invalid.
|
||||
* `match(fname)` Return true if the filename matches the pattern, or
|
||||
false otherwise.
|
||||
* `matchOne(fileArray, patternArray, partial)` Take a `/`-split
|
||||
filename, and match it against a single row in the `regExpSet`. This
|
||||
method is mainly for internal use, but is exposed so that it can be
|
||||
used by a glob-walker that needs to avoid excessive filesystem calls.
|
||||
|
||||
All other methods are internal, and will be called as necessary.
|
||||
|
||||
### minimatch(path, pattern, options)
|
||||
|
||||
Main export. Tests a path against the pattern using the options.
|
||||
|
||||
```javascript
|
||||
var isJS = minimatch(file, "*.js", { matchBase: true })
|
||||
```
|
||||
|
||||
### minimatch.filter(pattern, options)
|
||||
|
||||
Returns a function that tests its
|
||||
supplied argument, suitable for use with `Array.filter`. Example:
|
||||
|
||||
```javascript
|
||||
var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true}))
|
||||
```
|
||||
|
||||
### minimatch.match(list, pattern, options)
|
||||
|
||||
Match against the list of
|
||||
files, in the style of fnmatch or glob. If nothing is matched, and
|
||||
options.nonull is set, then return a list containing the pattern itself.
|
||||
|
||||
```javascript
|
||||
var javascripts = minimatch.match(fileList, "*.js", {matchBase: true}))
|
||||
```
|
||||
|
||||
### minimatch.makeRe(pattern, options)
|
||||
|
||||
Make a regular expression object from the pattern.
|
||||
|
||||
## Options
|
||||
|
||||
All options are `false` by default.
|
||||
|
||||
### debug
|
||||
|
||||
Dump a ton of stuff to stderr.
|
||||
|
||||
### nobrace
|
||||
|
||||
Do not expand `{a,b}` and `{1..3}` brace sets.
|
||||
|
||||
### noglobstar
|
||||
|
||||
Disable `**` matching against multiple folder names.
|
||||
|
||||
### dot
|
||||
|
||||
Allow patterns to match filenames starting with a period, even if
|
||||
the pattern does not explicitly have a period in that spot.
|
||||
|
||||
Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot`
|
||||
is set.
|
||||
|
||||
### noext
|
||||
|
||||
Disable "extglob" style patterns like `+(a|b)`.
|
||||
|
||||
### nocase
|
||||
|
||||
Perform a case-insensitive match.
|
||||
|
||||
### nonull
|
||||
|
||||
When a match is not found by `minimatch.match`, return a list containing
|
||||
the pattern itself if this option is set. When not set, an empty list
|
||||
is returned if there are no matches.
|
||||
|
||||
### matchBase
|
||||
|
||||
If set, then patterns without slashes will be matched
|
||||
against the basename of the path if it contains slashes. For example,
|
||||
`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.
|
||||
|
||||
### nocomment
|
||||
|
||||
Suppress the behavior of treating `#` at the start of a pattern as a
|
||||
comment.
|
||||
|
||||
### nonegate
|
||||
|
||||
Suppress the behavior of treating a leading `!` character as negation.
|
||||
|
||||
### flipNegate
|
||||
|
||||
Returns from negate expressions the same as if they were not negated.
|
||||
(Ie, true on a hit, false on a miss.)
|
||||
|
||||
### partial
|
||||
|
||||
Compare a partial path to a pattern. As long as the parts of the path that
|
||||
are present are not contradicted by the pattern, it will be treated as a
|
||||
match. This is useful in applications where you're walking through a
|
||||
folder structure, and don't yet have the full path, but want to ensure that
|
||||
you do not walk down paths that can never be a match.
|
||||
|
||||
For example,
|
||||
|
||||
```js
|
||||
minimatch('/a/b', '/a/*/c/d', { partial: true }) // true, might be /a/b/c/d
|
||||
minimatch('/a/b', '/**/d', { partial: true }) // true, might be /a/b/.../d
|
||||
minimatch('/x/y/z', '/a/**/z', { partial: true }) // false, because x !== a
|
||||
```
|
||||
|
||||
### allowWindowsEscape
|
||||
|
||||
Windows path separator `\` is by default converted to `/`, which
|
||||
prohibits the usage of `\` as a escape character. This flag skips that
|
||||
behavior and allows using the escape character.
|
||||
|
||||
## Comparisons to other fnmatch/glob implementations
|
||||
|
||||
While strict compliance with the existing standards is a worthwhile
|
||||
goal, some discrepancies exist between minimatch and other
|
||||
implementations, and are intentional.
|
||||
|
||||
If the pattern starts with a `!` character, then it is negated. Set the
|
||||
`nonegate` flag to suppress this behavior, and treat leading `!`
|
||||
characters normally. This is perhaps relevant if you wish to start the
|
||||
pattern with a negative extglob pattern like `!(a|B)`. Multiple `!`
|
||||
characters at the start of a pattern will negate the pattern multiple
|
||||
times.
|
||||
|
||||
If a pattern starts with `#`, then it is treated as a comment, and
|
||||
will not match anything. Use `\#` to match a literal `#` at the
|
||||
start of a line, or set the `nocomment` flag to suppress this behavior.
|
||||
|
||||
The double-star character `**` is supported by default, unless the
|
||||
`noglobstar` flag is set. This is supported in the manner of bsdglob
|
||||
and bash 4.1, where `**` only has special significance if it is the only
|
||||
thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
|
||||
`a/**b` will not.
|
||||
|
||||
If an escaped pattern has no matches, and the `nonull` flag is set,
|
||||
then minimatch.match returns the pattern as-provided, rather than
|
||||
interpreting the character escapes. For example,
|
||||
`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
|
||||
`"*a?"`. This is akin to setting the `nullglob` option in bash, except
|
||||
that it does not resolve escaped pattern characters.
|
||||
|
||||
If brace expansion is not disabled, then it is performed before any
|
||||
other interpretation of the glob pattern. Thus, a pattern like
|
||||
`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
|
||||
**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
|
||||
checked for validity. Since those two are valid, matching proceeds.
|
||||
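Tying the comparison notes above back to code, a few checks that follow directly from the documented behavior:

```javascript
var minimatch = require("minimatch")

minimatch("#foo", "#foo")                      // false: a leading # makes the pattern a comment
minimatch("#foo", "#foo", { nocomment: true }) // true: comment handling suppressed
minimatch("a/x/y/b", "a/**/b")                 // true: ** spans multiple path parts
minimatch("a/x/y/b", "a/**b")                  // false: ** is only special as a whole path part
```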
947
desktop-operator/node_modules/@electron/asar/node_modules/minimatch/minimatch.js
generated
vendored
Normal file
@@ -0,0 +1,947 @@
|
||||
module.exports = minimatch
|
||||
minimatch.Minimatch = Minimatch
|
||||
|
||||
var path = (function () { try { return require('path') } catch (e) {}}()) || {
|
||||
sep: '/'
|
||||
}
|
||||
minimatch.sep = path.sep
|
||||
|
||||
var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {}
|
||||
var expand = require('brace-expansion')
|
||||
|
||||
var plTypes = {
|
||||
'!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
|
||||
'?': { open: '(?:', close: ')?' },
|
||||
'+': { open: '(?:', close: ')+' },
|
||||
'*': { open: '(?:', close: ')*' },
|
||||
'@': { open: '(?:', close: ')' }
|
||||
}
|
||||
|
||||
// any single thing other than /
|
||||
// don't need to escape / when using new RegExp()
|
||||
var qmark = '[^/]'
|
||||
|
||||
// * => any number of characters
|
||||
var star = qmark + '*?'
|
||||
|
||||
// ** when dots are allowed. Anything goes, except .. and .
|
||||
// not (^ or / followed by one or two dots followed by $ or /),
|
||||
// followed by anything, any number of times.
|
||||
var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
|
||||
|
||||
// not a ^ or / followed by a dot,
|
||||
// followed by anything, any number of times.
|
||||
var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
|
||||
|
||||
// characters that need to be escaped in RegExp.
|
||||
var reSpecials = charSet('().*{}+?[]^$\\!')
|
||||
|
||||
// "abc" -> { a:true, b:true, c:true }
|
||||
function charSet (s) {
|
||||
return s.split('').reduce(function (set, c) {
|
||||
set[c] = true
|
||||
return set
|
||||
}, {})
|
||||
}
|
||||
|
||||
// normalizes slashes.
|
||||
var slashSplit = /\/+/
|
||||
|
||||
minimatch.filter = filter
|
||||
function filter (pattern, options) {
|
||||
options = options || {}
|
||||
return function (p, i, list) {
|
||||
return minimatch(p, pattern, options)
|
||||
}
|
||||
}
|
||||
|
||||
function ext (a, b) {
|
||||
b = b || {}
|
||||
var t = {}
|
||||
Object.keys(a).forEach(function (k) {
|
||||
t[k] = a[k]
|
||||
})
|
||||
Object.keys(b).forEach(function (k) {
|
||||
t[k] = b[k]
|
||||
})
|
||||
return t
|
||||
}
|
||||
|
||||
minimatch.defaults = function (def) {
|
||||
if (!def || typeof def !== 'object' || !Object.keys(def).length) {
|
||||
return minimatch
|
||||
}
|
||||
|
||||
var orig = minimatch
|
||||
|
||||
var m = function minimatch (p, pattern, options) {
|
||||
return orig(p, pattern, ext(def, options))
|
||||
}
|
||||
|
||||
m.Minimatch = function Minimatch (pattern, options) {
|
||||
return new orig.Minimatch(pattern, ext(def, options))
|
||||
}
|
||||
m.Minimatch.defaults = function defaults (options) {
|
||||
return orig.defaults(ext(def, options)).Minimatch
|
||||
}
|
||||
|
||||
m.filter = function filter (pattern, options) {
|
||||
return orig.filter(pattern, ext(def, options))
|
||||
}
|
||||
|
||||
m.defaults = function defaults (options) {
|
||||
return orig.defaults(ext(def, options))
|
||||
}
|
||||
|
||||
m.makeRe = function makeRe (pattern, options) {
|
||||
return orig.makeRe(pattern, ext(def, options))
|
||||
}
|
||||
|
||||
m.braceExpand = function braceExpand (pattern, options) {
|
||||
return orig.braceExpand(pattern, ext(def, options))
|
||||
}
|
||||
|
||||
m.match = function (list, pattern, options) {
|
||||
return orig.match(list, pattern, ext(def, options))
|
||||
}
|
||||
|
||||
return m
|
||||
}
|
||||
|
||||
Minimatch.defaults = function (def) {
|
||||
return minimatch.defaults(def).Minimatch
|
||||
}
|
||||
|
||||
function minimatch (p, pattern, options) {
|
||||
assertValidPattern(pattern)
|
||||
|
||||
if (!options) options = {}
|
||||
|
||||
// shortcut: comments match nothing.
|
||||
if (!options.nocomment && pattern.charAt(0) === '#') {
|
||||
return false
|
||||
}
|
||||
|
||||
return new Minimatch(pattern, options).match(p)
|
||||
}
|
||||
|
||||
function Minimatch (pattern, options) {
|
||||
if (!(this instanceof Minimatch)) {
|
||||
return new Minimatch(pattern, options)
|
||||
}
|
||||
|
||||
assertValidPattern(pattern)
|
||||
|
||||
if (!options) options = {}
|
||||
|
||||
pattern = pattern.trim()
|
||||
|
||||
// windows support: need to use /, not \
|
||||
if (!options.allowWindowsEscape && path.sep !== '/') {
|
||||
pattern = pattern.split(path.sep).join('/')
|
||||
}
|
||||
|
||||
this.options = options
|
||||
this.set = []
|
||||
this.pattern = pattern
|
||||
this.regexp = null
|
||||
this.negate = false
|
||||
this.comment = false
|
||||
this.empty = false
|
||||
this.partial = !!options.partial
|
||||
|
||||
// make the set of regexps etc.
|
||||
this.make()
|
||||
}
|
||||
|
||||
Minimatch.prototype.debug = function () {}
|
||||
|
||||
Minimatch.prototype.make = make
|
||||
function make () {
|
||||
var pattern = this.pattern
|
||||
var options = this.options
|
||||
|
||||
// empty patterns and comments match nothing.
|
||||
if (!options.nocomment && pattern.charAt(0) === '#') {
|
||||
this.comment = true
|
||||
return
|
||||
}
|
||||
if (!pattern) {
|
||||
this.empty = true
|
||||
return
|
||||
}
|
||||
|
||||
// step 1: figure out negation, etc.
|
||||
this.parseNegate()
|
||||
|
||||
// step 2: expand braces
|
||||
var set = this.globSet = this.braceExpand()
|
||||
|
||||
if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) }
|
||||
|
||||
this.debug(this.pattern, set)
|
||||
|
||||
// step 3: now we have a set, so turn each one into a series of path-portion
|
||||
// matching patterns.
|
||||
// These will be regexps, except in the case of "**", which is
|
||||
// set to the GLOBSTAR object for globstar behavior,
|
||||
// and will not contain any / characters
|
||||
set = this.globParts = set.map(function (s) {
|
||||
return s.split(slashSplit)
|
||||
})
|
||||
|
||||
this.debug(this.pattern, set)
|
||||
|
||||
// glob --> regexps
|
||||
set = set.map(function (s, si, set) {
|
||||
return s.map(this.parse, this)
|
||||
}, this)
|
||||
|
||||
this.debug(this.pattern, set)
|
||||
|
||||
// filter out everything that didn't compile properly.
|
||||
set = set.filter(function (s) {
|
||||
return s.indexOf(false) === -1
|
||||
})
|
||||
|
||||
this.debug(this.pattern, set)
|
||||
|
||||
this.set = set
|
||||
}
|
||||
|
||||
Minimatch.prototype.parseNegate = parseNegate
|
||||
function parseNegate () {
|
||||
var pattern = this.pattern
|
||||
var negate = false
|
||||
var options = this.options
|
||||
var negateOffset = 0
|
||||
|
||||
if (options.nonegate) return
|
||||
|
||||
for (var i = 0, l = pattern.length
|
||||
; i < l && pattern.charAt(i) === '!'
|
||||
; i++) {
|
||||
negate = !negate
|
||||
negateOffset++
|
||||
}
|
||||
|
||||
if (negateOffset) this.pattern = pattern.substr(negateOffset)
|
||||
this.negate = negate
|
||||
}
|
||||
|
||||
// Brace expansion:
|
||||
// a{b,c}d -> abd acd
|
||||
// a{b,}c -> abc ac
|
||||
// a{0..3}d -> a0d a1d a2d a3d
|
||||
// a{b,c{d,e}f}g -> abg acdfg acefg
|
||||
// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
|
||||
//
|
||||
// Invalid sets are not expanded.
|
||||
// a{2..}b -> a{2..}b
|
||||
// a{b}c -> a{b}c
|
||||
minimatch.braceExpand = function (pattern, options) {
|
||||
return braceExpand(pattern, options)
|
||||
}
|
||||
|
||||
Minimatch.prototype.braceExpand = braceExpand
|
||||
|
||||
function braceExpand (pattern, options) {
|
||||
if (!options) {
|
||||
if (this instanceof Minimatch) {
|
||||
options = this.options
|
||||
} else {
|
||||
options = {}
|
||||
}
|
||||
}
|
||||
|
||||
pattern = typeof pattern === 'undefined'
|
||||
? this.pattern : pattern
|
||||
|
||||
assertValidPattern(pattern)
|
||||
|
||||
// Thanks to Yeting Li <https://github.com/yetingli> for
|
||||
// improving this regexp to avoid a ReDOS vulnerability.
|
||||
if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
|
||||
// shortcut. no need to expand.
|
||||
return [pattern]
|
||||
}
|
||||
|
||||
return expand(pattern)
|
||||
}
|
||||
|
||||
var MAX_PATTERN_LENGTH = 1024 * 64
|
||||
var assertValidPattern = function (pattern) {
|
||||
if (typeof pattern !== 'string') {
|
||||
throw new TypeError('invalid pattern')
|
||||
}
|
||||
|
||||
if (pattern.length > MAX_PATTERN_LENGTH) {
|
||||
throw new TypeError('pattern is too long')
|
||||
}
|
||||
}
|
||||
|
||||
// parse a component of the expanded set.
|
||||
// At this point, no pattern may contain "/" in it
|
||||
// so we're going to return a 2d array, where each entry is the full
|
||||
// pattern, split on '/', and then turned into a regular expression.
|
||||
// A regexp is made at the end which joins each array with an
|
||||
// escaped /, and another full one which joins each regexp with |.
|
||||
//
|
||||
// Following the lead of Bash 4.1, note that "**" only has special meaning
|
||||
// when it is the *only* thing in a path portion. Otherwise, any series
|
||||
// of * is equivalent to a single *. Globstar behavior is enabled by
|
||||
// default, and can be disabled by setting options.noglobstar.
|
||||
Minimatch.prototype.parse = parse
|
||||
var SUBPARSE = {}
|
||||
function parse (pattern, isSub) {
|
||||
assertValidPattern(pattern)
|
||||
|
||||
var options = this.options
|
||||
|
||||
// shortcuts
|
||||
if (pattern === '**') {
|
||||
if (!options.noglobstar)
|
||||
return GLOBSTAR
|
||||
else
|
||||
pattern = '*'
|
||||
}
|
||||
if (pattern === '') return ''
|
||||
|
||||
var re = ''
|
||||
var hasMagic = !!options.nocase
|
||||
var escaping = false
|
||||
// ? => one single character
|
||||
var patternListStack = []
|
||||
var negativeLists = []
|
||||
var stateChar
|
||||
var inClass = false
|
||||
var reClassStart = -1
|
||||
var classStart = -1
|
||||
// . and .. never match anything that doesn't start with .,
|
||||
// even when options.dot is set.
|
||||
var patternStart = pattern.charAt(0) === '.' ? '' // anything
|
||||
// not (start or / followed by . or .. followed by / or end)
|
||||
: options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
|
||||
: '(?!\\.)'
|
||||
var self = this
|
||||
|
||||
function clearStateChar () {
|
||||
if (stateChar) {
|
||||
// we had some state-tracking character
|
||||
// that wasn't consumed by this pass.
|
||||
switch (stateChar) {
|
||||
case '*':
|
||||
re += star
|
||||
hasMagic = true
|
||||
break
|
||||
case '?':
|
||||
re += qmark
|
||||
hasMagic = true
|
||||
break
|
||||
default:
|
||||
re += '\\' + stateChar
|
||||
break
|
||||
}
|
||||
self.debug('clearStateChar %j %j', stateChar, re)
|
||||
stateChar = false
|
||||
}
|
||||
}
|
||||
|
||||
for (var i = 0, len = pattern.length, c
|
||||
; (i < len) && (c = pattern.charAt(i))
|
||||
; i++) {
|
||||
this.debug('%s\t%s %s %j', pattern, i, re, c)
|
||||
|
||||
// skip over any that are escaped.
|
||||
if (escaping && reSpecials[c]) {
|
||||
re += '\\' + c
|
||||
escaping = false
|
||||
continue
|
||||
}
|
||||
|
||||
switch (c) {
|
||||
/* istanbul ignore next */
|
||||
case '/': {
|
||||
// completely not allowed, even escaped.
|
||||
// Should already be path-split by now.
|
||||
return false
|
||||
}
|
||||
|
||||
case '\\':
|
||||
clearStateChar()
|
||||
escaping = true
|
||||
continue
|
||||
|
||||
// the various stateChar values
|
||||
// for the "extglob" stuff.
|
||||
case '?':
|
||||
case '*':
|
||||
case '+':
|
||||
case '@':
|
||||
case '!':
|
||||
this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c)
|
||||
|
||||
// all of those are literals inside a class, except that
|
||||
// the glob [!a] means [^a] in regexp
|
||||
if (inClass) {
|
||||
this.debug(' in class')
|
||||
if (c === '!' && i === classStart + 1) c = '^'
|
||||
re += c
|
||||
continue
|
||||
}
|
||||
|
||||
// if we already have a stateChar, then it means
|
||||
// that there was something like ** or +? in there.
|
||||
// Handle the stateChar, then proceed with this one.
|
||||
self.debug('call clearStateChar %j', stateChar)
|
||||
clearStateChar()
|
||||
stateChar = c
|
||||
// if extglob is disabled, then +(asdf|foo) isn't a thing.
|
||||
// just clear the statechar *now*, rather than even diving into
|
||||
// the patternList stuff.
|
||||
if (options.noext) clearStateChar()
|
||||
continue
|
||||
|
||||
case '(':
|
||||
if (inClass) {
|
||||
re += '('
|
||||
continue
|
||||
}
|
||||
|
||||
if (!stateChar) {
|
||||
re += '\\('
|
||||
continue
|
||||
}
|
||||
|
||||
patternListStack.push({
|
||||
type: stateChar,
|
||||
start: i - 1,
|
||||
reStart: re.length,
|
||||
open: plTypes[stateChar].open,
|
||||
close: plTypes[stateChar].close
|
||||
})
|
||||
// negation is (?:(?!js)[^/]*)
|
||||
re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
|
||||
this.debug('plType %j %j', stateChar, re)
|
||||
stateChar = false
|
||||
continue
|
||||
|
||||
case ')':
|
||||
if (inClass || !patternListStack.length) {
|
||||
re += '\\)'
|
||||
continue
|
||||
}
|
||||
|
||||
clearStateChar()
|
||||
hasMagic = true
|
||||
var pl = patternListStack.pop()
|
||||
// negation is (?:(?!js)[^/]*)
|
||||
// The others are (?:<pattern>)<type>
|
||||
re += pl.close
|
||||
if (pl.type === '!') {
|
||||
negativeLists.push(pl)
|
||||
}
|
||||
pl.reEnd = re.length
|
||||
continue
|
||||
|
||||
case '|':
|
||||
if (inClass || !patternListStack.length || escaping) {
|
||||
re += '\\|'
|
||||
escaping = false
|
||||
continue
|
||||
}
|
||||
|
||||
clearStateChar()
|
||||
re += '|'
|
||||
continue
|
||||
|
||||
// these are mostly the same in regexp and glob
|
||||
case '[':
|
||||
// swallow any state-tracking char before the [
|
||||
clearStateChar()
|
||||
|
||||
if (inClass) {
|
||||
re += '\\' + c
|
||||
continue
|
||||
}
|
||||
|
||||
inClass = true
|
||||
classStart = i
|
||||
reClassStart = re.length
|
||||
re += c
|
||||
continue
|
||||
|
||||
case ']':
|
||||
// a right bracket shall lose its special
|
||||
// meaning and represent itself in
|
||||
// a bracket expression if it occurs
|
||||
// first in the list. -- POSIX.2 2.8.3.2
|
||||
if (i === classStart + 1 || !inClass) {
|
||||
re += '\\' + c
|
||||
escaping = false
|
||||
continue
|
||||
}
|
||||
|
||||
// handle the case where we left a class open.
|
||||
// "[z-a]" is valid, equivalent to "\[z-a\]"
|
||||
// split where the last [ was, make sure we don't have
|
||||
// an invalid re. if so, re-walk the contents of the
|
||||
// would-be class to re-translate any characters that
|
||||
// were passed through as-is
|
||||
// TODO: It would probably be faster to determine this
|
||||
// without a try/catch and a new RegExp, but it's tricky
|
||||
// to do safely. For now, this is safe and works.
|
||||
var cs = pattern.substring(classStart + 1, i)
|
||||
try {
|
||||
RegExp('[' + cs + ']')
|
||||
} catch (er) {
|
||||
// not a valid class!
|
||||
var sp = this.parse(cs, SUBPARSE)
|
||||
re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]'
|
||||
hasMagic = hasMagic || sp[1]
|
||||
inClass = false
|
||||
continue
|
||||
}
|
||||
|
||||
// finish up the class.
|
||||
hasMagic = true
|
||||
inClass = false
|
||||
re += c
|
||||
continue
|
||||
|
||||
default:
|
||||
// swallow any state char that wasn't consumed
|
||||
clearStateChar()
|
||||
|
||||
if (escaping) {
|
||||
// no need
|
||||
escaping = false
|
||||
} else if (reSpecials[c]
|
||||
&& !(c === '^' && inClass)) {
|
||||
re += '\\'
|
||||
}
|
||||
|
||||
re += c
|
||||
|
||||
} // switch
|
||||
} // for
|
||||
|
||||
// handle the case where we left a class open.
|
||||
// "[abc" is valid, equivalent to "\[abc"
|
||||
if (inClass) {
|
||||
// split where the last [ was, and escape it
|
||||
// this is a huge pita. We now have to re-walk
|
||||
// the contents of the would-be class to re-translate
|
||||
// any characters that were passed through as-is
|
||||
cs = pattern.substr(classStart + 1)
|
||||
sp = this.parse(cs, SUBPARSE)
|
||||
re = re.substr(0, reClassStart) + '\\[' + sp[0]
|
||||
hasMagic = hasMagic || sp[1]
|
||||
}
|
||||
|
||||
// handle the case where we had a +( thing at the *end*
|
||||
// of the pattern.
|
||||
// each pattern list stack adds 3 chars, and we need to go through
|
||||
// and escape any | chars that were passed through as-is for the regexp.
|
||||
// Go through and escape them, taking care not to double-escape any
|
||||
// | chars that were already escaped.
|
||||
for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
|
||||
var tail = re.slice(pl.reStart + pl.open.length)
|
||||
this.debug('setting tail', re, pl)
|
||||
// maybe some even number of \, then maybe 1 \, followed by a |
|
||||
tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) {
|
||||
if (!$2) {
|
||||
// the | isn't already escaped, so escape it.
|
||||
$2 = '\\'
|
||||
}
|
||||
|
||||
// need to escape all those slashes *again*, without escaping the
|
||||
// one that we need for escaping the | character. As it works out,
|
||||
// escaping an even number of slashes can be done by simply repeating
|
||||
// it exactly after itself. That's why this trick works.
|
||||
//
|
||||
// I am sorry that you have to see this.
|
||||
return $1 + $1 + $2 + '|'
|
||||
})
|
||||
|
||||
this.debug('tail=%j\n %s', tail, tail, pl, re)
|
||||
var t = pl.type === '*' ? star
|
||||
: pl.type === '?' ? qmark
|
||||
: '\\' + pl.type
|
||||
|
||||
hasMagic = true
|
||||
re = re.slice(0, pl.reStart) + t + '\\(' + tail
|
||||
}
|
||||
|
||||
// handle trailing things that only matter at the very end.
|
||||
clearStateChar()
|
||||
if (escaping) {
|
||||
// trailing \\
|
||||
re += '\\\\'
|
||||
}
|
||||
|
||||
// only need to apply the nodot start if the re starts with
|
||||
// something that could conceivably capture a dot
|
||||
var addPatternStart = false
|
||||
switch (re.charAt(0)) {
|
||||
case '[': case '.': case '(': addPatternStart = true
|
||||
}
|
||||
|
||||
// Hack to work around lack of negative lookbehind in JS
|
||||
// A pattern like: *.!(x).!(y|z) needs to ensure that a name
|
||||
// like 'a.xyz.yz' doesn't match. So, the first negative
|
||||
// lookahead, has to look ALL the way ahead, to the end of
|
||||
// the pattern.
|
||||
for (var n = negativeLists.length - 1; n > -1; n--) {
|
||||
var nl = negativeLists[n]
|
||||
|
||||
var nlBefore = re.slice(0, nl.reStart)
|
||||
var nlFirst = re.slice(nl.reStart, nl.reEnd - 8)
|
||||
var nlLast = re.slice(nl.reEnd - 8, nl.reEnd)
|
||||
var nlAfter = re.slice(nl.reEnd)
|
||||
|
||||
nlLast += nlAfter
|
||||
|
||||
// Handle nested stuff like *(*.js|!(*.json)), where open parens
|
||||
// mean that we should *not* include the ) in the bit that is considered
|
||||
// "after" the negated section.
|
||||
var openParensBefore = nlBefore.split('(').length - 1
|
||||
var cleanAfter = nlAfter
|
||||
for (i = 0; i < openParensBefore; i++) {
|
||||
cleanAfter = cleanAfter.replace(/\)[+*?]?/, '')
|
||||
}
|
||||
nlAfter = cleanAfter
|
||||
|
||||
var dollar = ''
|
||||
if (nlAfter === '' && isSub !== SUBPARSE) {
|
||||
dollar = '$'
|
||||
}
|
||||
var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast
|
||||
re = newRe
|
||||
}
|
||||
|
||||
// if the re is not "" at this point, then we need to make sure
|
||||
// it doesn't match against an empty path part.
|
||||
// Otherwise a/* will match a/, which it should not.
|
||||
if (re !== '' && hasMagic) {
|
||||
re = '(?=.)' + re
|
||||
}
|
||||
|
||||
if (addPatternStart) {
|
||||
re = patternStart + re
|
||||
}
|
||||
|
||||
// parsing just a piece of a larger pattern.
|
||||
if (isSub === SUBPARSE) {
|
||||
return [re, hasMagic]
|
||||
}
|
||||
|
||||
// skip the regexp for non-magical patterns
|
||||
// unescape anything in it, though, so that it'll be
|
||||
// an exact match against a file etc.
|
||||
if (!hasMagic) {
|
||||
return globUnescape(pattern)
|
||||
}
|
||||
|
||||
var flags = options.nocase ? 'i' : ''
|
||||
try {
|
||||
var regExp = new RegExp('^' + re + '$', flags)
|
||||
} catch (er) /* istanbul ignore next - should be impossible */ {
|
||||
// If it was an invalid regular expression, then it can't match
|
||||
// anything. This trick looks for a character after the end of
|
||||
// the string, which is of course impossible, except in multi-line
|
||||
// mode, but it's not a /m regex.
|
||||
return new RegExp('$.')
|
||||
}
|
||||
|
||||
regExp._glob = pattern
|
||||
regExp._src = re
|
||||
|
||||
return regExp
|
||||
}
|
||||
|
||||
minimatch.makeRe = function (pattern, options) {
|
||||
return new Minimatch(pattern, options || {}).makeRe()
|
||||
}
|
||||
|
||||
Minimatch.prototype.makeRe = makeRe
|
||||
function makeRe () {
|
||||
if (this.regexp || this.regexp === false) return this.regexp
|
||||
|
||||
// at this point, this.set is a 2d array of partial
|
||||
// pattern strings, or "**".
|
||||
//
|
||||
// It's better to use .match(). This function shouldn't
|
||||
// be used, really, but it's pretty convenient sometimes,
|
||||
// when you just want to work with a regex.
|
||||
var set = this.set
|
||||
|
||||
if (!set.length) {
|
||||
this.regexp = false
|
||||
return this.regexp
|
||||
}
|
||||
var options = this.options
|
||||
|
||||
var twoStar = options.noglobstar ? star
|
||||
: options.dot ? twoStarDot
|
||||
: twoStarNoDot
|
||||
var flags = options.nocase ? 'i' : ''
|
||||
|
||||
var re = set.map(function (pattern) {
|
||||
return pattern.map(function (p) {
|
||||
return (p === GLOBSTAR) ? twoStar
|
||||
: (typeof p === 'string') ? regExpEscape(p)
|
||||
: p._src
|
||||
}).join('\\\/')
|
||||
}).join('|')
|
||||
|
||||
// must match entire pattern
|
||||
// ending in a * or ** will make it less strict.
|
||||
re = '^(?:' + re + ')$'
|
||||
|
||||
// can match anything, as long as it's not this.
|
||||
if (this.negate) re = '^(?!' + re + ').*$'
|
||||
|
||||
try {
|
||||
this.regexp = new RegExp(re, flags)
|
||||
} catch (ex) /* istanbul ignore next - should be impossible */ {
|
||||
this.regexp = false
|
||||
}
|
||||
return this.regexp
|
||||
}
|
||||
|
||||
minimatch.match = function (list, pattern, options) {
|
||||
options = options || {}
|
||||
var mm = new Minimatch(pattern, options)
|
||||
list = list.filter(function (f) {
|
||||
return mm.match(f)
|
||||
})
|
||||
if (mm.options.nonull && !list.length) {
|
||||
list.push(pattern)
|
||||
}
|
||||
return list
|
||||
}
|
||||
|
||||
Minimatch.prototype.match = function match (f, partial) {
|
||||
if (typeof partial === 'undefined') partial = this.partial
|
||||
this.debug('match', f, this.pattern)
|
||||
// short-circuit in the case of busted things.
|
||||
// comments, etc.
|
||||
if (this.comment) return false
|
||||
if (this.empty) return f === ''
|
||||
|
||||
if (f === '/' && partial) return true
|
||||
|
||||
var options = this.options
|
||||
|
||||
// windows: need to use /, not \
|
||||
if (path.sep !== '/') {
|
||||
f = f.split(path.sep).join('/')
|
||||
}
|
||||
|
||||
// treat the test path as a set of pathparts.
|
||||
f = f.split(slashSplit)
|
||||
this.debug(this.pattern, 'split', f)
|
||||
|
||||
// just ONE of the pattern sets in this.set needs to match
|
||||
// in order for it to be valid. If negating, then just one
|
||||
// match means that we have failed.
|
||||
// Either way, return on the first hit.
|
||||
|
||||
var set = this.set
|
||||
this.debug(this.pattern, 'set', set)
|
||||
|
||||
// Find the basename of the path by looking for the last non-empty segment
|
||||
var filename
|
||||
var i
|
||||
for (i = f.length - 1; i >= 0; i--) {
|
||||
filename = f[i]
|
||||
if (filename) break
|
||||
}
|
||||
|
||||
for (i = 0; i < set.length; i++) {
|
||||
var pattern = set[i]
|
||||
var file = f
|
||||
if (options.matchBase && pattern.length === 1) {
|
||||
file = [filename]
|
||||
}
|
||||
var hit = this.matchOne(file, pattern, partial)
|
||||
if (hit) {
|
||||
if (options.flipNegate) return true
|
||||
return !this.negate
|
||||
}
|
||||
}
|
||||
|
||||
// didn't get any hits. this is success if it's a negative
|
||||
// pattern, failure otherwise.
|
||||
if (options.flipNegate) return false
|
||||
return this.negate
|
||||
}
|
||||
|
||||
// set partial to true to test if, for example,
|
||||
// "/a/b" matches the start of "/*/b/*/d"
|
||||
// Partial means, if you run out of file before you run
|
||||
// out of pattern, then that's fine, as long as all
|
||||
// the parts match.
|
||||
Minimatch.prototype.matchOne = function (file, pattern, partial) {
|
||||
var options = this.options
|
||||
|
||||
this.debug('matchOne',
|
||||
{ 'this': this, file: file, pattern: pattern })
|
||||
|
||||
this.debug('matchOne', file.length, pattern.length)
|
||||
|
||||
for (var fi = 0,
|
||||
pi = 0,
|
||||
fl = file.length,
|
||||
pl = pattern.length
|
||||
; (fi < fl) && (pi < pl)
|
||||
; fi++, pi++) {
|
||||
this.debug('matchOne loop')
|
||||
var p = pattern[pi]
|
||||
var f = file[fi]
|
||||
|
||||
this.debug(pattern, p, f)
|
||||
|
||||
// should be impossible.
|
||||
// some invalid regexp stuff in the set.
|
||||
/* istanbul ignore if */
|
||||
if (p === false) return false
|
||||
|
||||
if (p === GLOBSTAR) {
|
||||
this.debug('GLOBSTAR', [pattern, p, f])
|
||||
|
||||
// "**"
|
||||
// a/**/b/**/c would match the following:
|
||||
// a/b/x/y/z/c
|
||||
// a/x/y/z/b/c
|
||||
// a/b/x/b/x/c
|
||||
// a/b/c
|
||||
// To do this, take the rest of the pattern after
|
||||
// the **, and see if it would match the file remainder.
|
||||
// If so, return success.
|
||||
// If not, the ** "swallows" a segment, and try again.
|
||||
// This is recursively awful.
|
||||
//
|
||||
// a/**/b/**/c matching a/b/x/y/z/c
|
||||
// - a matches a
|
||||
// - doublestar
|
||||
// - matchOne(b/x/y/z/c, b/**/c)
|
||||
// - b matches b
|
||||
// - doublestar
|
||||
// - matchOne(x/y/z/c, c) -> no
|
||||
// - matchOne(y/z/c, c) -> no
|
||||
// - matchOne(z/c, c) -> no
|
||||
// - matchOne(c, c) yes, hit
|
||||
var fr = fi
|
||||
var pr = pi + 1
|
||||
if (pr === pl) {
|
||||
this.debug('** at the end')
|
||||
// a ** at the end will just swallow the rest.
|
||||
// We have found a match.
|
||||
// however, it will not swallow /.x, unless
|
||||
// options.dot is set.
|
||||
// . and .. are *never* matched by **, for explosively
|
||||
// exponential reasons.
|
||||
for (; fi < fl; fi++) {
|
||||
if (file[fi] === '.' || file[fi] === '..' ||
|
||||
(!options.dot && file[fi].charAt(0) === '.')) return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// ok, let's see if we can swallow whatever we can.
|
||||
while (fr < fl) {
|
||||
var swallowee = file[fr]
|
||||
|
||||
this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)
|
||||
|
||||
// XXX remove this slice. Just pass the start index.
|
||||
if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
|
||||
this.debug('globstar found match!', fr, fl, swallowee)
|
||||
// found a match.
|
||||
return true
|
||||
} else {
|
||||
// can't swallow "." or ".." ever.
|
||||
// can only swallow ".foo" when explicitly asked.
|
||||
if (swallowee === '.' || swallowee === '..' ||
|
||||
(!options.dot && swallowee.charAt(0) === '.')) {
|
||||
this.debug('dot detected!', file, fr, pattern, pr)
|
||||
break
|
||||
}
|
||||
|
||||
// ** swallows a segment, and continue.
|
||||
this.debug('globstar swallow a segment, and continue')
|
||||
fr++
|
||||
}
|
||||
}
|
||||
|
||||
// no match was found.
|
||||
// However, in partial mode, we can't say this is necessarily over.
|
||||
// If there's more *pattern* left, then
|
||||
/* istanbul ignore if */
|
||||
if (partial) {
|
||||
// ran out of file
|
||||
this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
|
||||
if (fr === fl) return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// something other than **
|
||||
// non-magic patterns just have to match exactly
|
||||
// patterns with magic have been turned into regexps.
|
||||
var hit
|
||||
if (typeof p === 'string') {
|
||||
hit = f === p
|
||||
this.debug('string match', p, f, hit)
|
||||
} else {
|
||||
hit = f.match(p)
|
||||
this.debug('pattern match', p, f, hit)
|
||||
}
|
||||
|
||||
if (!hit) return false
|
||||
}
|
||||
|
||||
// Note: ending in / means that we'll get a final ""
|
||||
// at the end of the pattern. This can only match a
|
||||
// corresponding "" at the end of the file.
|
||||
// If the file ends in /, then it can only match a
|
||||
// a pattern that ends in /, unless the pattern just
|
||||
// doesn't have any more for it. But, a/b/ should *not*
|
||||
// match "a/b/*", even though "" matches against the
|
||||
// [^/]*? pattern, except in partial mode, where it might
|
||||
// simply not be reached yet.
|
||||
// However, a/b/ should still satisfy a/*
|
||||
|
||||
// now either we fell off the end of the pattern, or we're done.
|
||||
if (fi === fl && pi === pl) {
|
||||
// ran out of pattern and filename at the same time.
|
||||
// an exact hit!
|
||||
return true
|
||||
} else if (fi === fl) {
|
||||
// ran out of file, but still had pattern left.
|
||||
// this is ok if we're doing the match as part of
|
||||
// a glob fs traversal.
|
||||
return partial
|
||||
} else /* istanbul ignore else */ if (pi === pl) {
|
||||
// ran out of pattern, still have file left.
|
||||
// this is only acceptable if we're on the very last
|
||||
// empty segment of a file with a trailing slash.
|
||||
// a/* should match a/b/
|
||||
return (fi === fl - 1) && (file[fi] === '')
|
||||
}
|
||||
|
||||
// should be unreachable.
|
||||
/* istanbul ignore next */
|
||||
throw new Error('wtf?')
|
||||
}

// replace stuff like \* with *
function globUnescape (s) {
  return s.replace(/\\(.)/g, '$1')
}

function regExpEscape (s) {
  return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
}
33
desktop-operator/node_modules/@electron/asar/node_modules/minimatch/package.json
generated
vendored
Normal file
@@ -0,0 +1,33 @@
{
  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)",
  "name": "minimatch",
  "description": "a glob matcher in javascript",
  "version": "3.1.2",
  "publishConfig": {
    "tag": "v3-legacy"
  },
  "repository": {
    "type": "git",
    "url": "git://github.com/isaacs/minimatch.git"
  },
  "main": "minimatch.js",
  "scripts": {
    "test": "tap",
    "preversion": "npm test",
    "postversion": "npm publish",
    "postpublish": "git push origin --all; git push origin --tags"
  },
  "engines": {
    "node": "*"
  },
  "dependencies": {
    "brace-expansion": "^1.1.7"
  },
  "devDependencies": {
    "tap": "^15.1.6"
  },
  "license": "ISC",
  "files": [
    "minimatch.js"
  ]
}
60
desktop-operator/node_modules/@electron/asar/package.json
generated
vendored
Normal file
@@ -0,0 +1,60 @@
|
||||
{
|
||||
"name": "@electron/asar",
|
||||
"description": "Creating Electron app packages",
|
||||
"version": "3.4.1",
|
||||
"main": "./lib/asar.js",
|
||||
"types": "./lib/asar.d.ts",
|
||||
"bin": {
|
||||
"asar": "./bin/asar.js"
|
||||
},
|
||||
"files": [
|
||||
"bin",
|
||||
"lib"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">=10.12.0"
|
||||
},
|
||||
"license": "MIT",
|
||||
"homepage": "https://github.com/electron/asar",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/electron/asar.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/electron/asar/issues"
|
||||
},
|
||||
"publishConfig": {
|
||||
"provenance": true
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"mocha": "xvfb-maybe electron-mocha && mocha",
|
||||
"mocha:update": "mocha --update",
|
||||
"mocha:watch": "mocha --watch",
|
||||
"test": "yarn lint && yarn mocha",
|
||||
"lint": "yarn prettier:check",
|
||||
"prettier": "prettier \"src/**/*.ts\" \"test/**/*.js\" \"*.js\"",
|
||||
"prettier:check": "yarn prettier --check",
|
||||
"prettier:write": "yarn prettier --write",
|
||||
"prepare": "tsc"
|
||||
},
|
||||
"dependencies": {
|
||||
"commander": "^5.0.0",
|
||||
"glob": "^7.1.6",
|
||||
"minimatch": "^3.0.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/minimatch": "^3.0.5",
|
||||
"@types/node": "^12.0.0",
|
||||
"chai": "^4.5.0",
|
||||
"electron": "^22.0.0",
|
||||
"electron-mocha": "^13.0.1",
|
||||
"lodash": "^4.17.15",
|
||||
"mocha": "^10.1.0",
|
||||
"mocha-chai-jest-snapshot": "^1.1.6",
|
||||
"prettier": "^3.3.3",
|
||||
"rimraf": "^3.0.2",
|
||||
"typescript": "^5.5.4",
|
||||
"xvfb-maybe": "^0.2.1"
|
||||
}
|
||||
}
21
desktop-operator/node_modules/@electron/get/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Contributors to the Electron project
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
138
desktop-operator/node_modules/@electron/get/README.md
generated
vendored
Normal file
@@ -0,0 +1,138 @@
# @electron/get

> Download Electron release artifacts

[](https://circleci.com/gh/electron/get)
[](https://npm.im/@electron/get)

## Usage

### Simple: Downloading an Electron Binary ZIP

```typescript
import { download } from '@electron/get';

// NB: Use this syntax within an async function, Node does not have support for
// top-level await as of Node 12.
const zipFilePath = await download('4.0.4');
```

### Advanced: Downloading a macOS Electron Symbol File

```typescript
import { downloadArtifact } from '@electron/get';

// NB: Use this syntax within an async function, Node does not have support for
// top-level await as of Node 12.
const zipFilePath = await downloadArtifact({
  version: '4.0.4',
  platform: 'darwin',
  artifactName: 'electron',
  artifactSuffix: 'symbols',
  arch: 'x64',
});
```

### Specifying a mirror

To specify another location to download Electron assets from, the following options are
available:

* `mirrorOptions` Object
  * `mirror` String (optional) - The base URL of the mirror to download from.
  * `nightlyMirror` String (optional) - The Electron nightly-specific mirror URL.
  * `customDir` String (optional) - The name of the directory to download from, often scoped by version number.
  * `customFilename` String (optional) - The name of the asset to download.
  * `resolveAssetURL` Function (optional) - A function allowing customization of the URL used to download the asset.

Anatomy of a download URL, in terms of `mirrorOptions`:

```
https://github.com/electron/electron/releases/download/v4.0.4/electron-v4.0.4-linux-x64.zip
\_____________________________________________________/\____/\____________________________/
                          |                               |                  |
                 mirror / nightlyMirror                customDir       customFilename
```

Example:

```typescript
import { download } from '@electron/get';

const zipFilePath = await download('4.0.4', {
  mirrorOptions: {
    mirror: 'https://mirror.example.com/electron/',
    customDir: 'custom',
    customFilename: 'unofficial-electron-linux.zip'
  }
});
// Will download from https://mirror.example.com/electron/custom/unofficial-electron-linux.zip

const nightlyZipFilePath = await download('8.0.0-nightly.20190901', {
  mirrorOptions: {
    nightlyMirror: 'https://nightly.example.com/',
    customDir: 'nightlies',
    customFilename: 'nightly-linux.zip'
  }
});
// Will download from https://nightly.example.com/nightlies/nightly-linux.zip
```

`customDir` can have the placeholder `{{ version }}`, which will be replaced by the version
specified (without the leading `v`). For example:

```javascript
const zipFilePath = await download('4.0.4', {
  mirrorOptions: {
    mirror: 'https://mirror.example.com/electron/',
    customDir: 'version-{{ version }}'
  }
});
// Will download from https://mirror.example.com/electron/version-4.0.4/electron-v4.0.4-linux-x64.zip
```
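
If a mirror does not follow this directory/filename layout at all, `mirrorOptions.resolveAssetURL` can compute the final URL directly. A minimal sketch, assuming a made-up artifact host and path shape; the callback receives the artifact details object and must resolve to the URL to download:

```typescript
import { download } from '@electron/get';

const zipFilePath = await download('4.0.4', {
  mirrorOptions: {
    resolveAssetURL: async (details) => {
      // Hypothetical endpoint; only the returned string is used.
      return `https://artifacts.example.com/electron/${details.version}/electron.zip`;
    }
  }
});
```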

#### Using environment variables for mirror options
Mirror options can also be specified via the following environment variables:
* `ELECTRON_CUSTOM_DIR` - Specifies the custom directory to download from.
* `ELECTRON_CUSTOM_FILENAME` - Specifies the custom file name to download.
* `ELECTRON_MIRROR` - Specifies the URL of the server to download from if the version is not a nightly version.
* `ELECTRON_NIGHTLY_MIRROR` - Specifies the URL of the server to download from if the version is a nightly version.
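
A minimal sketch of the same configuration driven by the environment (the mirror URL and filename are placeholders); in this vendored version the `ELECTRON_*` variables take precedence over values passed in `mirrorOptions`:

```typescript
import { download } from '@electron/get';

process.env.ELECTRON_MIRROR = 'https://mirror.example.com/electron/';
process.env.ELECTRON_CUSTOM_FILENAME = 'unofficial-electron-linux.zip';

// No mirrorOptions needed; the environment supplies them.
const zipFilePath = await download('4.0.4');
```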

### Overriding the version downloaded

The version downloaded can be overridden by setting the `ELECTRON_CUSTOM_VERSION` environment variable.
Setting this environment variable will override the version passed in to `download` or `downloadArtifact`.
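
For example (the version numbers are arbitrary):

```typescript
import { download } from '@electron/get';

process.env.ELECTRON_CUSTOM_VERSION = '4.0.5';

// Requests 4.0.4, but ELECTRON_CUSTOM_VERSION wins and 4.0.5 is downloaded.
const zipFilePath = await download('4.0.4');
```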

## How It Works

This module downloads Electron to a known place on your system and caches it
so that future requests for that asset can be returned instantly. The cache
locations are:

* Linux: `$XDG_CACHE_HOME` or `~/.cache/electron/`
* MacOS: `~/Library/Caches/electron/`
* Windows: `%LOCALAPPDATA%/electron/Cache` or `~/AppData/Local/electron/Cache/`

By default, the module uses [`got`](https://github.com/sindresorhus/got) as the
downloader. As a result, you can use the same [options](https://github.com/sindresorhus/got#options)
via `downloadOptions`.
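
A hedged sketch of that pass-through; the header value is arbitrary, and the exact keys accepted are defined by the bundled `got` version, not by `@electron/get` itself:

```typescript
import { download } from '@electron/get';

const zipFilePath = await download('4.0.4', {
  // Forwarded unchanged to the got-based downloader.
  downloadOptions: {
    headers: { 'cache-control': 'no-cache' }
  }
});
```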

### Progress Bar

By default, a progress bar is shown when downloading an artifact for more than 30 seconds. To
disable, set the `ELECTRON_GET_NO_PROGRESS` environment variable to any non-empty value, or set
`quiet` to `true` in `downloadOptions`. If you need to monitor progress yourself via the API, set
`getProgressCallback` in `downloadOptions`, which has the same function signature as `got`'s
[`downloadProgress` event callback](https://github.com/sindresorhus/got#ondownloadprogress-progress).
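
A minimal sketch of a custom progress handler, assuming `progress.percent` is the 0-1 ratio reported by `got`:

```typescript
import { download } from '@electron/get';

const zipFilePath = await download('4.0.4', {
  downloadOptions: {
    quiet: true, // keep the built-in progress bar off
    getProgressCallback: async (progress) => {
      console.log(`downloaded ${(progress.percent * 100).toFixed(1)}%`);
    }
  }
});
```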

### Proxies

Downstream packages should utilize the `initializeProxy` function to add HTTP(S) proxy support. If
the environment variable `ELECTRON_GET_USE_PROXY` is set, it is called automatically.
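
A sketch of the manual route (the proxy address is a placeholder); `initializeProxy` maps the standard `HTTP_PROXY`/`HTTPS_PROXY`/`NO_PROXY` variables onto the bundled `global-agent` setup:

```typescript
import { initializeProxy, download } from '@electron/get';

process.env.HTTPS_PROXY = 'http://proxy.example.com:8080';

// Call once, before starting any downloads.
initializeProxy();

const zipFilePath = await download('4.0.4');
```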
8
desktop-operator/node_modules/@electron/get/dist/cjs/Cache.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
export declare class Cache {
    private cacheRoot;
    constructor(cacheRoot?: string);
    static getCacheDirectory(downloadUrl: string): string;
    getCachePath(downloadUrl: string, fileName: string): string;
    getPathForFileInCache(url: string, fileName: string): Promise<string | null>;
    putFileInCache(url: string, currentPath: string, fileName: string): Promise<string>;
}
60
desktop-operator/node_modules/@electron/get/dist/cjs/Cache.js
generated
vendored
Normal file
@@ -0,0 +1,60 @@
|
||||
"use strict";
|
||||
var __rest = (this && this.__rest) || function (s, e) {
|
||||
var t = {};
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
|
||||
t[p] = s[p];
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function")
|
||||
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
|
||||
t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const debug_1 = require("debug");
|
||||
const env_paths_1 = require("env-paths");
|
||||
const fs = require("fs-extra");
|
||||
const path = require("path");
|
||||
const url = require("url");
|
||||
const crypto = require("crypto");
|
||||
const d = debug_1.default('@electron/get:cache');
|
||||
const defaultCacheRoot = env_paths_1.default('electron', {
|
||||
suffix: '',
|
||||
}).cache;
|
||||
class Cache {
|
||||
constructor(cacheRoot = defaultCacheRoot) {
|
||||
this.cacheRoot = cacheRoot;
|
||||
}
|
||||
static getCacheDirectory(downloadUrl) {
|
||||
const parsedDownloadUrl = url.parse(downloadUrl);
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const { search, hash, pathname } = parsedDownloadUrl, rest = __rest(parsedDownloadUrl, ["search", "hash", "pathname"]);
|
||||
const strippedUrl = url.format(Object.assign(Object.assign({}, rest), { pathname: path.dirname(pathname || 'electron') }));
|
||||
return crypto
|
||||
.createHash('sha256')
|
||||
.update(strippedUrl)
|
||||
.digest('hex');
|
||||
}
|
||||
getCachePath(downloadUrl, fileName) {
|
||||
return path.resolve(this.cacheRoot, Cache.getCacheDirectory(downloadUrl), fileName);
|
||||
}
|
||||
async getPathForFileInCache(url, fileName) {
|
||||
const cachePath = this.getCachePath(url, fileName);
|
||||
if (await fs.pathExists(cachePath)) {
|
||||
return cachePath;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
async putFileInCache(url, currentPath, fileName) {
|
||||
const cachePath = this.getCachePath(url, fileName);
|
||||
d(`Moving ${currentPath} to ${cachePath}`);
|
||||
if (await fs.pathExists(cachePath)) {
|
||||
d('* Replacing existing file');
|
||||
await fs.remove(cachePath);
|
||||
}
|
||||
await fs.move(currentPath, cachePath);
|
||||
return cachePath;
|
||||
}
|
||||
}
|
||||
exports.Cache = Cache;
|
||||
//# sourceMappingURL=Cache.js.map
|
||||
1
desktop-operator/node_modules/@electron/get/dist/cjs/Cache.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"Cache.js","sourceRoot":"","sources":["../../src/Cache.ts"],"names":[],"mappings":";;;;;;;;;;;;;AAAA,iCAA0B;AAC1B,yCAAiC;AACjC,+BAA+B;AAC/B,6BAA6B;AAC7B,2BAA2B;AAC3B,iCAAiC;AAEjC,MAAM,CAAC,GAAG,eAAK,CAAC,qBAAqB,CAAC,CAAC;AAEvC,MAAM,gBAAgB,GAAG,mBAAQ,CAAC,UAAU,EAAE;IAC5C,MAAM,EAAE,EAAE;CACX,CAAC,CAAC,KAAK,CAAC;AAET,MAAa,KAAK;IAChB,YAAoB,YAAY,gBAAgB;QAA5B,cAAS,GAAT,SAAS,CAAmB;IAAG,CAAC;IAE7C,MAAM,CAAC,iBAAiB,CAAC,WAAmB;QACjD,MAAM,iBAAiB,GAAG,GAAG,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;QACjD,6DAA6D;QAC7D,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,QAAQ,KAAc,iBAAiB,EAA7B,gEAA6B,CAAC;QAC9D,MAAM,WAAW,GAAG,GAAG,CAAC,MAAM,iCAAM,IAAI,KAAE,QAAQ,EAAE,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,UAAU,CAAC,IAAG,CAAC;QAE5F,OAAO,MAAM;aACV,UAAU,CAAC,QAAQ,CAAC;aACpB,MAAM,CAAC,WAAW,CAAC;aACnB,MAAM,CAAC,KAAK,CAAC,CAAC;IACnB,CAAC;IAEM,YAAY,CAAC,WAAmB,EAAE,QAAgB;QACvD,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,SAAS,EAAE,KAAK,CAAC,iBAAiB,CAAC,WAAW,CAAC,EAAE,QAAQ,CAAC,CAAC;IACtF,CAAC;IAEM,KAAK,CAAC,qBAAqB,CAAC,GAAW,EAAE,QAAgB;QAC9D,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACnD,IAAI,MAAM,EAAE,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YAClC,OAAO,SAAS,CAAC;SAClB;QAED,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,KAAK,CAAC,cAAc,CAAC,GAAW,EAAE,WAAmB,EAAE,QAAgB;QAC5E,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACnD,CAAC,CAAC,UAAU,WAAW,OAAO,SAAS,EAAE,CAAC,CAAC;QAC3C,IAAI,MAAM,EAAE,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YAClC,CAAC,CAAC,2BAA2B,CAAC,CAAC;YAC/B,MAAM,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;SAC5B;QAED,MAAM,EAAE,CAAC,IAAI,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;QAEtC,OAAO,SAAS,CAAC;IACnB,CAAC;CACF;AAxCD,sBAwCC"}
|
||||
3
desktop-operator/node_modules/@electron/get/dist/cjs/Downloader.d.ts
generated
vendored
Normal file
@@ -0,0 +1,3 @@
export interface Downloader<T> {
    download(url: string, targetFilePath: string, options: T): Promise<void>;
}
3
desktop-operator/node_modules/@electron/get/dist/cjs/Downloader.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=Downloader.js.map
1
desktop-operator/node_modules/@electron/get/dist/cjs/Downloader.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"Downloader.js","sourceRoot":"","sources":["../../src/Downloader.ts"],"names":[],"mappings":""}
|
||||
21
desktop-operator/node_modules/@electron/get/dist/cjs/GotDownloader.d.ts
generated
vendored
Normal file
@@ -0,0 +1,21 @@
import { Progress as GotProgress, Options as GotOptions } from 'got';
import { Downloader } from './Downloader';
/**
 * See [`got#options`](https://github.com/sindresorhus/got#options) for possible keys/values.
 */
export declare type GotDownloaderOptions = (GotOptions & {
    isStream?: true;
}) & {
    /**
     * if defined, triggers every time `got`'s `downloadProgress` event callback is triggered.
     */
    getProgressCallback?: (progress: GotProgress) => Promise<void>;
    /**
     * if `true`, disables the console progress bar (setting the `ELECTRON_GET_NO_PROGRESS`
     * environment variable to a non-empty value also does this).
     */
    quiet?: boolean;
};
export declare class GotDownloader implements Downloader<GotDownloaderOptions> {
    download(url: string, targetFilePath: string, options?: GotDownloaderOptions): Promise<void>;
}
76
desktop-operator/node_modules/@electron/get/dist/cjs/GotDownloader.js
generated
vendored
Normal file
@@ -0,0 +1,76 @@
|
||||
"use strict";
|
||||
var __rest = (this && this.__rest) || function (s, e) {
|
||||
var t = {};
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
|
||||
t[p] = s[p];
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function")
|
||||
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
|
||||
t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs = require("fs-extra");
|
||||
const got_1 = require("got");
|
||||
const path = require("path");
|
||||
const ProgressBar = require("progress");
|
||||
const PROGRESS_BAR_DELAY_IN_SECONDS = 30;
|
||||
class GotDownloader {
|
||||
async download(url, targetFilePath, options) {
|
||||
if (!options) {
|
||||
options = {};
|
||||
}
|
||||
const { quiet, getProgressCallback } = options, gotOptions = __rest(options, ["quiet", "getProgressCallback"]);
|
||||
let downloadCompleted = false;
|
||||
let bar;
|
||||
let progressPercent;
|
||||
let timeout = undefined;
|
||||
await fs.mkdirp(path.dirname(targetFilePath));
|
||||
const writeStream = fs.createWriteStream(targetFilePath);
|
||||
if (!quiet || !process.env.ELECTRON_GET_NO_PROGRESS) {
|
||||
const start = new Date();
|
||||
timeout = setTimeout(() => {
|
||||
if (!downloadCompleted) {
|
||||
bar = new ProgressBar(`Downloading ${path.basename(url)}: [:bar] :percent ETA: :eta seconds `, {
|
||||
curr: progressPercent,
|
||||
total: 100,
|
||||
});
|
||||
// https://github.com/visionmedia/node-progress/issues/159
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
bar.start = start;
|
||||
}
|
||||
}, PROGRESS_BAR_DELAY_IN_SECONDS * 1000);
|
||||
}
|
||||
await new Promise((resolve, reject) => {
|
||||
const downloadStream = got_1.default.stream(url, gotOptions);
|
||||
downloadStream.on('downloadProgress', async (progress) => {
|
||||
progressPercent = progress.percent;
|
||||
if (bar) {
|
||||
bar.update(progress.percent);
|
||||
}
|
||||
if (getProgressCallback) {
|
||||
await getProgressCallback(progress);
|
||||
}
|
||||
});
|
||||
downloadStream.on('error', error => {
|
||||
if (error instanceof got_1.HTTPError && error.response.statusCode === 404) {
|
||||
error.message += ` for ${error.response.url}`;
|
||||
}
|
||||
if (writeStream.destroy) {
|
||||
writeStream.destroy(error);
|
||||
}
|
||||
reject(error);
|
||||
});
|
||||
writeStream.on('error', error => reject(error));
|
||||
writeStream.on('close', () => resolve());
|
||||
downloadStream.pipe(writeStream);
|
||||
});
|
||||
downloadCompleted = true;
|
||||
if (timeout) {
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.GotDownloader = GotDownloader;
|
||||
//# sourceMappingURL=GotDownloader.js.map
|
||||
1
desktop-operator/node_modules/@electron/get/dist/cjs/GotDownloader.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"GotDownloader.js","sourceRoot":"","sources":["../../src/GotDownloader.ts"],"names":[],"mappings":";;;;;;;;;;;;;AAAA,+BAA+B;AAC/B,6BAAqF;AACrF,6BAA6B;AAC7B,wCAAwC;AAIxC,MAAM,6BAA6B,GAAG,EAAE,CAAC;AAiBzC,MAAa,aAAa;IACxB,KAAK,CAAC,QAAQ,CACZ,GAAW,EACX,cAAsB,EACtB,OAA8B;QAE9B,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QACD,MAAM,EAAE,KAAK,EAAE,mBAAmB,KAAoB,OAAO,EAAzB,8DAAyB,CAAC;QAC9D,IAAI,iBAAiB,GAAG,KAAK,CAAC;QAC9B,IAAI,GAA4B,CAAC;QACjC,IAAI,eAAuB,CAAC;QAC5B,IAAI,OAAO,GAA+B,SAAS,CAAC;QACpD,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC;QAC9C,MAAM,WAAW,GAAG,EAAE,CAAC,iBAAiB,CAAC,cAAc,CAAC,CAAC;QAEzD,IAAI,CAAC,KAAK,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE;YACnD,MAAM,KAAK,GAAG,IAAI,IAAI,EAAE,CAAC;YACzB,OAAO,GAAG,UAAU,CAAC,GAAG,EAAE;gBACxB,IAAI,CAAC,iBAAiB,EAAE;oBACtB,GAAG,GAAG,IAAI,WAAW,CACnB,eAAe,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,sCAAsC,EACvE;wBACE,IAAI,EAAE,eAAe;wBACrB,KAAK,EAAE,GAAG;qBACX,CACF,CAAC;oBACF,0DAA0D;oBAC1D,8DAA8D;oBAC7D,GAAW,CAAC,KAAK,GAAG,KAAK,CAAC;iBAC5B;YACH,CAAC,EAAE,6BAA6B,GAAG,IAAI,CAAC,CAAC;SAC1C;QACD,MAAM,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC1C,MAAM,cAAc,GAAG,aAAG,CAAC,MAAM,CAAC,GAAG,EAAE,UAAU,CAAC,CAAC;YACnD,cAAc,CAAC,EAAE,CAAC,kBAAkB,EAAE,KAAK,EAAC,QAAQ,EAAC,EAAE;gBACrD,eAAe,GAAG,QAAQ,CAAC,OAAO,CAAC;gBACnC,IAAI,GAAG,EAAE;oBACP,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;iBAC9B;gBACD,IAAI,mBAAmB,EAAE;oBACvB,MAAM,mBAAmB,CAAC,QAAQ,CAAC,CAAC;iBACrC;YACH,CAAC,CAAC,CAAC;YACH,cAAc,CAAC,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE;gBACjC,IAAI,KAAK,YAAY,eAAS,IAAI,KAAK,CAAC,QAAQ,CAAC,UAAU,KAAK,GAAG,EAAE;oBACnE,KAAK,CAAC,OAAO,IAAI,QAAQ,KAAK,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC;iBAC/C;gBACD,IAAI,WAAW,CAAC,OAAO,EAAE;oBACvB,WAAW,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;iBAC5B;gBAED,MAAM,CAAC,KAAK,CAAC,CAAC;YAChB,CAAC,CAAC,CAAC;YACH,WAAW,CAAC,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;YAChD,WAAW,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC;YAEzC,cAAc,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;QACnC,CAAC,CAAC,CAAC;QAEH,iBAAiB,GAAG,IAAI,CAAC;QACzB,IAAI,OAAO,EAAE;YACX,YAAY,CAAC,OAAO,CAAC,CAAC;SACvB;IACH,CAAC;CACF;AAlED,sCAkEC"}
|
||||
4
desktop-operator/node_modules/@electron/get/dist/cjs/artifact-utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
import { ElectronArtifactDetails } from './types';
export declare function getArtifactFileName(details: ElectronArtifactDetails): string;
export declare function getArtifactRemoteURL(details: ElectronArtifactDetails): Promise<string>;
export declare function getArtifactVersion(details: ElectronArtifactDetails): string;
66
desktop-operator/node_modules/@electron/get/dist/cjs/artifact-utils.js
generated
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const utils_1 = require("./utils");
|
||||
const BASE_URL = 'https://github.com/electron/electron/releases/download/';
|
||||
const NIGHTLY_BASE_URL = 'https://github.com/electron/nightlies/releases/download/';
|
||||
function getArtifactFileName(details) {
|
||||
utils_1.ensureIsTruthyString(details, 'artifactName');
|
||||
if (details.isGeneric) {
|
||||
return details.artifactName;
|
||||
}
|
||||
utils_1.ensureIsTruthyString(details, 'arch');
|
||||
utils_1.ensureIsTruthyString(details, 'platform');
|
||||
utils_1.ensureIsTruthyString(details, 'version');
|
||||
return `${[
|
||||
details.artifactName,
|
||||
details.version,
|
||||
details.platform,
|
||||
details.arch,
|
||||
...(details.artifactSuffix ? [details.artifactSuffix] : []),
|
||||
].join('-')}.zip`;
|
||||
}
|
||||
exports.getArtifactFileName = getArtifactFileName;
|
||||
function mirrorVar(name, options, defaultValue) {
|
||||
// Convert camelCase to camel_case for env var reading
|
||||
const snakeName = name.replace(/([a-z])([A-Z])/g, (_, a, b) => `${a}_${b}`).toLowerCase();
|
||||
return (
|
||||
// .npmrc
|
||||
process.env[`npm_config_electron_${name.toLowerCase()}`] ||
|
||||
process.env[`NPM_CONFIG_ELECTRON_${snakeName.toUpperCase()}`] ||
|
||||
process.env[`npm_config_electron_${snakeName}`] ||
|
||||
// package.json
|
||||
process.env[`npm_package_config_electron_${name}`] ||
|
||||
process.env[`npm_package_config_electron_${snakeName.toLowerCase()}`] ||
|
||||
// env
|
||||
process.env[`ELECTRON_${snakeName.toUpperCase()}`] ||
|
||||
options[name] ||
|
||||
defaultValue);
|
||||
}
|
||||
async function getArtifactRemoteURL(details) {
|
||||
const opts = details.mirrorOptions || {};
|
||||
let base = mirrorVar('mirror', opts, BASE_URL);
|
||||
if (details.version.includes('nightly')) {
|
||||
const nightlyDeprecated = mirrorVar('nightly_mirror', opts, '');
|
||||
if (nightlyDeprecated) {
|
||||
base = nightlyDeprecated;
|
||||
console.warn(`nightly_mirror is deprecated, please use nightlyMirror`);
|
||||
}
|
||||
else {
|
||||
base = mirrorVar('nightlyMirror', opts, NIGHTLY_BASE_URL);
|
||||
}
|
||||
}
|
||||
const path = mirrorVar('customDir', opts, details.version).replace('{{ version }}', details.version.replace(/^v/, ''));
|
||||
const file = mirrorVar('customFilename', opts, getArtifactFileName(details));
|
||||
// Allow customized download URL resolution.
|
||||
if (opts.resolveAssetURL) {
|
||||
const url = await opts.resolveAssetURL(details);
|
||||
return url;
|
||||
}
|
||||
return `${base}${path}/${file}`;
|
||||
}
|
||||
exports.getArtifactRemoteURL = getArtifactRemoteURL;
|
||||
function getArtifactVersion(details) {
|
||||
return utils_1.normalizeVersion(mirrorVar('customVersion', details.mirrorOptions || {}, details.version));
|
||||
}
|
||||
exports.getArtifactVersion = getArtifactVersion;
|
||||
//# sourceMappingURL=artifact-utils.js.map
|
||||
1
desktop-operator/node_modules/@electron/get/dist/cjs/artifact-utils.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"artifact-utils.js","sourceRoot":"","sources":["../../src/artifact-utils.ts"],"names":[],"mappings":";;AACA,mCAAiE;AAEjE,MAAM,QAAQ,GAAG,yDAAyD,CAAC;AAC3E,MAAM,gBAAgB,GAAG,0DAA0D,CAAC;AAEpF,SAAgB,mBAAmB,CAAC,OAAgC;IAClE,4BAAoB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;IAE9C,IAAI,OAAO,CAAC,SAAS,EAAE;QACrB,OAAO,OAAO,CAAC,YAAY,CAAC;KAC7B;IAED,4BAAoB,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IACtC,4BAAoB,CAAC,OAAO,EAAE,UAAU,CAAC,CAAC;IAC1C,4BAAoB,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;IAEzC,OAAO,GAAG;QACR,OAAO,CAAC,YAAY;QACpB,OAAO,CAAC,OAAO;QACf,OAAO,CAAC,QAAQ;QAChB,OAAO,CAAC,IAAI;QACZ,GAAG,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;KAC5D,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC;AACpB,CAAC;AAlBD,kDAkBC;AAED,SAAS,SAAS,CAChB,IAAkD,EAClD,OAAsB,EACtB,YAAoB;IAEpB,sDAAsD;IACtD,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC;IAE1F,OAAO;IACL,SAAS;IACT,OAAO,CAAC,GAAG,CAAC,uBAAuB,IAAI,CAAC,WAAW,EAAE,EAAE,CAAC;QACxD,OAAO,CAAC,GAAG,CAAC,uBAAuB,SAAS,CAAC,WAAW,EAAE,EAAE,CAAC;QAC7D,OAAO,CAAC,GAAG,CAAC,uBAAuB,SAAS,EAAE,CAAC;QAC/C,eAAe;QACf,OAAO,CAAC,GAAG,CAAC,+BAA+B,IAAI,EAAE,CAAC;QAClD,OAAO,CAAC,GAAG,CAAC,+BAA+B,SAAS,CAAC,WAAW,EAAE,EAAE,CAAC;QACrE,MAAM;QACN,OAAO,CAAC,GAAG,CAAC,YAAY,SAAS,CAAC,WAAW,EAAE,EAAE,CAAC;QAClD,OAAO,CAAC,IAAI,CAAC;QACb,YAAY,CACb,CAAC;AACJ,CAAC;AAEM,KAAK,UAAU,oBAAoB,CAAC,OAAgC;IACzE,MAAM,IAAI,GAAkB,OAAO,CAAC,aAAa,IAAI,EAAE,CAAC;IACxD,IAAI,IAAI,GAAG,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC;IAC/C,IAAI,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;QACvC,MAAM,iBAAiB,GAAG,SAAS,CAAC,gBAAgB,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC;QAChE,IAAI,iBAAiB,EAAE;YACrB,IAAI,GAAG,iBAAiB,CAAC;YACzB,OAAO,CAAC,IAAI,CAAC,wDAAwD,CAAC,CAAC;SACxE;aAAM;YACL,IAAI,GAAG,SAAS,CAAC,eAAe,EAAE,IAAI,EAAE,gBAAgB,CAAC,CAAC;SAC3D;KACF;IACD,MAAM,IAAI,GAAG,SAAS,CAAC,WAAW,EAAE,IAAI,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC,OAAO,CAChE,eAAe,EACf,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAClC,CAAC;IACF,MAAM,IAAI,GAAG,SAAS,CAAC,gBAAgB,EAAE,IAAI,EAAE,mBAAmB,CAAC,OAAO,CAAC,CAAC,CAAC;IAE7E,4CAA4C;IAC5C,IAAI,IAAI,CAAC,eAAe,EAAE;QACxB,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,eAAe,CAAC,OAAO,CAAC,CAAC;QAChD,OAAO,GAAG,CAAC;KACZ;IAED,OAAO,GAAG,IAAI,GAAG,IAAI,IAAI,IAAI,EAAE,CAAC;AAClC,CAAC;AAzBD,oDAyBC;AAED,SAAgB,kBAAkB,CAAC,OAAgC;IACjE,OAAO,wBAAgB,CAAC,SAAS,CAAC,eAAe,EAAE,OAAO,CAAC,aAAa,IAAI,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC;AACpG,CAAC;AAFD,gDAEC"}
|
||||
3
desktop-operator/node_modules/@electron/get/dist/cjs/downloader-resolver.d.ts
generated
vendored
Normal file
@@ -0,0 +1,3 @@
import { DownloadOptions } from './types';
import { Downloader } from './Downloader';
export declare function getDownloaderForSystem(): Promise<Downloader<DownloadOptions>>;
12
desktop-operator/node_modules/@electron/get/dist/cjs/downloader-resolver.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
async function getDownloaderForSystem() {
    // TODO: Resolve the downloader or default to GotDownloader
    // Current thoughts are a dot-file traversal for something like
    // ".electron.downloader" which would be a text file with the name of the
    // npm module to import() and use as the downloader
    const { GotDownloader } = await Promise.resolve().then(() => require('./GotDownloader'));
    return new GotDownloader();
}
exports.getDownloaderForSystem = getDownloaderForSystem;
//# sourceMappingURL=downloader-resolver.js.map
1
desktop-operator/node_modules/@electron/get/dist/cjs/downloader-resolver.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"downloader-resolver.js","sourceRoot":"","sources":["../../src/downloader-resolver.ts"],"names":[],"mappings":";;AAGO,KAAK,UAAU,sBAAsB;IAC1C,2DAA2D;IAC3D,+DAA+D;IAC/D,yEAAyE;IACzE,mDAAmD;IACnD,MAAM,EAAE,aAAa,EAAE,GAAG,2CAAa,iBAAiB,EAAC,CAAC;IAC1D,OAAO,IAAI,aAAa,EAAE,CAAC;AAC7B,CAAC;AAPD,wDAOC"}
|
||||
18
desktop-operator/node_modules/@electron/get/dist/cjs/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,18 @@
import { ElectronDownloadRequestOptions, ElectronPlatformArtifactDetailsWithDefaults } from './types';
export { getHostArch } from './utils';
export { initializeProxy } from './proxy';
export * from './types';
/**
 * Downloads an artifact from an Electron release and returns an absolute path
 * to the downloaded file.
 *
 * @param artifactDetails - The information required to download the artifact
 */
export declare function downloadArtifact(_artifactDetails: ElectronPlatformArtifactDetailsWithDefaults): Promise<string>;
/**
 * Downloads a specific version of Electron and returns an absolute path to a
 * ZIP file.
 *
 * @param version - The version of Electron you want to download
 */
export declare function download(version: string, options?: ElectronDownloadRequestOptions): Promise<string>;
140
desktop-operator/node_modules/@electron/get/dist/cjs/index.js
generated
vendored
Normal file
@@ -0,0 +1,140 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const debug_1 = require("debug");
|
||||
const fs = require("fs-extra");
|
||||
const path = require("path");
|
||||
const semver = require("semver");
|
||||
const sumchecker = require("sumchecker");
|
||||
const artifact_utils_1 = require("./artifact-utils");
|
||||
const Cache_1 = require("./Cache");
|
||||
const downloader_resolver_1 = require("./downloader-resolver");
|
||||
const proxy_1 = require("./proxy");
|
||||
const utils_1 = require("./utils");
|
||||
var utils_2 = require("./utils");
|
||||
exports.getHostArch = utils_2.getHostArch;
|
||||
var proxy_2 = require("./proxy");
|
||||
exports.initializeProxy = proxy_2.initializeProxy;
|
||||
const d = debug_1.default('@electron/get:index');
|
||||
if (process.env.ELECTRON_GET_USE_PROXY) {
|
||||
proxy_1.initializeProxy();
|
||||
}
|
||||
async function validateArtifact(artifactDetails, downloadedAssetPath, _downloadArtifact) {
|
||||
return await utils_1.withTempDirectoryIn(artifactDetails.tempDirectory, async (tempFolder) => {
|
||||
// Don't try to verify the hash of the hash file itself
|
||||
// and for older versions that don't have a SHASUMS256.txt
|
||||
if (!artifactDetails.artifactName.startsWith('SHASUMS256') &&
|
||||
!artifactDetails.unsafelyDisableChecksums &&
|
||||
semver.gte(artifactDetails.version, '1.3.2')) {
|
||||
let shasumPath;
|
||||
const checksums = artifactDetails.checksums;
|
||||
if (checksums) {
|
||||
shasumPath = path.resolve(tempFolder, 'SHASUMS256.txt');
|
||||
const fileNames = Object.keys(checksums);
|
||||
if (fileNames.length === 0) {
|
||||
throw new Error('Provided "checksums" object is empty, cannot generate a valid SHASUMS256.txt');
|
||||
}
|
||||
const generatedChecksums = fileNames
|
||||
.map(fileName => `${checksums[fileName]} *${fileName}`)
|
||||
.join('\n');
|
||||
await fs.writeFile(shasumPath, generatedChecksums);
|
||||
}
|
||||
else {
|
||||
shasumPath = await _downloadArtifact({
|
||||
isGeneric: true,
|
||||
version: artifactDetails.version,
|
||||
artifactName: 'SHASUMS256.txt',
|
||||
force: artifactDetails.force,
|
||||
downloadOptions: artifactDetails.downloadOptions,
|
||||
cacheRoot: artifactDetails.cacheRoot,
|
||||
downloader: artifactDetails.downloader,
|
||||
mirrorOptions: artifactDetails.mirrorOptions,
|
||||
});
|
||||
}
|
||||
// For versions 1.3.2 - 1.3.4, need to overwrite the `defaultTextEncoding` option:
|
||||
// https://github.com/electron/electron/pull/6676#discussion_r75332120
|
||||
if (semver.satisfies(artifactDetails.version, '1.3.2 - 1.3.4')) {
|
||||
const validatorOptions = {};
|
||||
validatorOptions.defaultTextEncoding = 'binary';
|
||||
const checker = new sumchecker.ChecksumValidator('sha256', shasumPath, validatorOptions);
|
||||
await checker.validate(path.dirname(downloadedAssetPath), path.basename(downloadedAssetPath));
|
||||
}
|
||||
else {
|
||||
await sumchecker('sha256', shasumPath, path.dirname(downloadedAssetPath), [
|
||||
path.basename(downloadedAssetPath),
|
||||
]);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Downloads an artifact from an Electron release and returns an absolute path
|
||||
* to the downloaded file.
|
||||
*
|
||||
* @param artifactDetails - The information required to download the artifact
|
||||
*/
|
||||
async function downloadArtifact(_artifactDetails) {
|
||||
const artifactDetails = Object.assign({}, _artifactDetails);
|
||||
if (!_artifactDetails.isGeneric) {
|
||||
const platformArtifactDetails = artifactDetails;
|
||||
if (!platformArtifactDetails.platform) {
|
||||
d('No platform found, defaulting to the host platform');
|
||||
platformArtifactDetails.platform = process.platform;
|
||||
}
|
||||
if (platformArtifactDetails.arch) {
|
||||
platformArtifactDetails.arch = utils_1.getNodeArch(platformArtifactDetails.arch);
|
||||
}
|
||||
else {
|
||||
d('No arch found, defaulting to the host arch');
|
||||
platformArtifactDetails.arch = utils_1.getHostArch();
|
||||
}
|
||||
}
|
||||
utils_1.ensureIsTruthyString(artifactDetails, 'version');
|
||||
artifactDetails.version = artifact_utils_1.getArtifactVersion(artifactDetails);
|
||||
const fileName = artifact_utils_1.getArtifactFileName(artifactDetails);
|
||||
const url = await artifact_utils_1.getArtifactRemoteURL(artifactDetails);
|
||||
const cache = new Cache_1.Cache(artifactDetails.cacheRoot);
|
||||
// Do not check if the file exists in the cache when force === true
|
||||
if (!artifactDetails.force) {
|
||||
d(`Checking the cache (${artifactDetails.cacheRoot}) for ${fileName} (${url})`);
|
||||
const cachedPath = await cache.getPathForFileInCache(url, fileName);
|
||||
if (cachedPath === null) {
|
||||
d('Cache miss');
|
||||
}
|
||||
else {
|
||||
d('Cache hit');
|
||||
try {
|
||||
await validateArtifact(artifactDetails, cachedPath, downloadArtifact);
|
||||
return cachedPath;
|
||||
}
|
||||
catch (err) {
|
||||
d("Artifact in cache didn't match checksums", err);
|
||||
d('falling back to re-download');
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!artifactDetails.isGeneric &&
|
||||
utils_1.isOfficialLinuxIA32Download(artifactDetails.platform, artifactDetails.arch, artifactDetails.version, artifactDetails.mirrorOptions)) {
|
||||
console.warn('Official Linux/ia32 support is deprecated.');
|
||||
console.warn('For more info: https://electronjs.org/blog/linux-32bit-support');
|
||||
}
|
||||
return await utils_1.withTempDirectoryIn(artifactDetails.tempDirectory, async (tempFolder) => {
|
||||
const tempDownloadPath = path.resolve(tempFolder, artifact_utils_1.getArtifactFileName(artifactDetails));
|
||||
const downloader = artifactDetails.downloader || (await downloader_resolver_1.getDownloaderForSystem());
|
||||
d(`Downloading ${url} to ${tempDownloadPath} with options: ${JSON.stringify(artifactDetails.downloadOptions)}`);
|
||||
await downloader.download(url, tempDownloadPath, artifactDetails.downloadOptions);
|
||||
await validateArtifact(artifactDetails, tempDownloadPath, downloadArtifact);
|
||||
return await cache.putFileInCache(url, tempDownloadPath, fileName);
|
||||
});
|
||||
}
|
||||
exports.downloadArtifact = downloadArtifact;
|
||||
/**
|
||||
* Downloads a specific version of Electron and returns an absolute path to a
|
||||
* ZIP file.
|
||||
*
|
||||
* @param version - The version of Electron you want to download
|
||||
*/
|
||||
function download(version, options) {
|
||||
return downloadArtifact(Object.assign(Object.assign({}, options), { version, platform: process.platform, arch: process.arch, artifactName: 'electron' }));
|
||||
}
|
||||
exports.download = download;
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
desktop-operator/node_modules/@electron/get/dist/cjs/index.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;AAAA,iCAA0B;AAC1B,+BAA+B;AAC/B,6BAA6B;AAC7B,iCAAiC;AACjC,yCAAyC;AAEzC,qDAAiG;AAOjG,mCAAgC;AAChC,+DAA+D;AAC/D,mCAA0C;AAC1C,mCAOiB;AAEjB,iCAAsC;AAA7B,8BAAA,WAAW,CAAA;AACpB,iCAA0C;AAAjC,kCAAA,eAAe,CAAA;AAGxB,MAAM,CAAC,GAAG,eAAK,CAAC,qBAAqB,CAAC,CAAC;AAEvC,IAAI,OAAO,CAAC,GAAG,CAAC,sBAAsB,EAAE;IACtC,uBAAe,EAAE,CAAC;CACnB;AAMD,KAAK,UAAU,gBAAgB,CAC7B,eAAwC,EACxC,mBAA2B,EAC3B,iBAAqC;IAErC,OAAO,MAAM,2BAAmB,CAAC,eAAe,CAAC,aAAa,EAAE,KAAK,EAAC,UAAU,EAAC,EAAE;QACjF,uDAAuD;QACvD,0DAA0D;QAC1D,IACE,CAAC,eAAe,CAAC,YAAY,CAAC,UAAU,CAAC,YAAY,CAAC;YACtD,CAAC,eAAe,CAAC,wBAAwB;YACzC,MAAM,CAAC,GAAG,CAAC,eAAe,CAAC,OAAO,EAAE,OAAO,CAAC,EAC5C;YACA,IAAI,UAAkB,CAAC;YACvB,MAAM,SAAS,GAAG,eAAe,CAAC,SAAS,CAAC;YAC5C,IAAI,SAAS,EAAE;gBACb,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBACxD,MAAM,SAAS,GAAa,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;gBACnD,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;oBAC1B,MAAM,IAAI,KAAK,CACb,8EAA8E,CAC/E,CAAC;iBACH;gBACD,MAAM,kBAAkB,GAAG,SAAS;qBACjC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,QAAQ,CAAC,KAAK,QAAQ,EAAE,CAAC;qBACtD,IAAI,CAAC,IAAI,CAAC,CAAC;gBACd,MAAM,EAAE,CAAC,SAAS,CAAC,UAAU,EAAE,kBAAkB,CAAC,CAAC;aACpD;iBAAM;gBACL,UAAU,GAAG,MAAM,iBAAiB,CAAC;oBACnC,SAAS,EAAE,IAAI;oBACf,OAAO,EAAE,eAAe,CAAC,OAAO;oBAChC,YAAY,EAAE,gBAAgB;oBAC9B,KAAK,EAAE,eAAe,CAAC,KAAK;oBAC5B,eAAe,EAAE,eAAe,CAAC,eAAe;oBAChD,SAAS,EAAE,eAAe,CAAC,SAAS;oBACpC,UAAU,EAAE,eAAe,CAAC,UAAU;oBACtC,aAAa,EAAE,eAAe,CAAC,aAAa;iBAC7C,CAAC,CAAC;aACJ;YAED,kFAAkF;YAClF,sEAAsE;YACtE,IAAI,MAAM,CAAC,SAAS,CAAC,eAAe,CAAC,OAAO,EAAE,eAAe,CAAC,EAAE;gBAC9D,MAAM,gBAAgB,GAA+B,EAAE,CAAC;gBACxD,gBAAgB,CAAC,mBAAmB,GAAG,QAAQ,CAAC;gBAChD,MAAM,OAAO,GAAG,IAAI,UAAU,CAAC,iBAAiB,CAAC,QAAQ,EAAE,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBACzF,MAAM,OAAO,CAAC,QAAQ,CACpB,IAAI,CAAC,OAAO,CAAC,mBAAmB,CAAC,EACjC,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CACnC,CAAC;aACH;iBAAM;gBACL,MAAM,UAAU,CAAC,QAAQ,EAAE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;oBACxE,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC;iBACnC,CAAC,CAAC;aACJ;SACF;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;GAKG;AACI,KAAK,UAAU,gBAAgB,CACpC,gBAA6D;IAE7D,MAAM,eAAe,qBACf,gBAA4C,CACjD,CAAC;IACF,IAAI,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC/B,MAAM,uBAAuB,GAAG,eAAkD,CAAC;QACnF,IAAI,CAAC,uBAAuB,CAAC,QAAQ,EAAE;YACrC,CAAC,CAAC,oDAAoD,CAAC,CAAC;YACxD,uBAAuB,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;SACrD;QACD,IAAI,uBAAuB,CAAC,IAAI,EAAE;YAChC,uBAAuB,CAAC,IAAI,GAAG,mBAAW,CAAC,uBAAuB,CAAC,IAAI,CAAC,CAAC;SAC1E;aAAM;YACL,CAAC,CAAC,4CAA4C,CAAC,CAAC;YAChD,uBAAuB,CAAC,IAAI,GAAG,mBAAW,EAAE,CAAC;SAC9C;KACF;IACD,4BAAoB,CAAC,eAAe,EAAE,SAAS,CAAC,CAAC;IAEjD,eAAe,CAAC,OAAO,GAAG,mCAAkB,CAAC,eAAe,CAAC,CAAC;IAC9D,MAAM,QAAQ,GAAG,oCAAmB,CAAC,eAAe,CAAC,CAAC;IACtD,MAAM,GAAG,GAAG,MAAM,qCAAoB,CAAC,eAAe,CAAC,CAAC;IACxD,MAAM,KAAK,GAAG,IAAI,aAAK,CAAC,eAAe,CAAC,SAAS,CAAC,CAAC;IAEnD,mEAAmE;IACnE,IAAI,CAAC,eAAe,CAAC,KAAK,EAAE;QAC1B,CAAC,CAAC,uBAAuB,eAAe,CAAC,SAAS,SAAS,QAAQ,KAAK,GAAG,GAAG,CAAC,CAAC;QAChF,MAAM,UAAU,GAAG,MAAM,KAAK,CAAC,qBAAqB,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QAEpE,IAAI,UAAU,KAAK,IAAI,EAAE;YACvB,CAAC,CAAC,YAAY,CAAC,CAAC;SACjB;aAAM;YACL,CAAC,CAAC,WAAW,CAAC,CAAC;YACf,IAAI;gBACF,MAAM,gBAAgB,CAAC,eAAe,EAAE,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBAEtE,OAAO,UAAU,CAAC;aACnB;YAAC,OAAO,GAAG,EAAE;gBACZ,CAAC,CAAC,0CAA0C,EAAE,GAAG,CAAC,CAAC;gBACnD,CAAC,CAAC,6BAA6B,CAAC,CAAC;aAClC;SACF;KACF;IAED,IACE,CAAC,eAAe,CAAC,SAAS;QAC1B,mCAA2B,CACzB,eAAe,CAAC,QAAQ,EACxB,eAAe,CAAC,IAAI,EACpB,eAAe,CAAC,OAAO,EACvB,eAAe,CAAC,aAAa,CAC9B,EACD;QACA,OAAO,CAAC,IAAI,CAAC,4CAA4C,CAAC,CAAC;QAC3D,OAAO,CAAC,IAAI,CAAC,gEAAgE,CAAC,CAAC;KAChF
;IAED,OAAO,MAAM,2BAAmB,CAAC,eAAe,CAAC,aAAa,EAAE,KAAK,EAAC,UAAU,EAAC,EAAE;QACjF,MAAM,gBAAgB,GAAG,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE,oCAAmB,CAAC,eAAe,CAAC,CAAC,CAAC;QAExF,MAAM,UAAU,GAAG,eAAe,CAAC,UAAU,IAAI,CAAC,MAAM,4CAAsB,EAAE,CAAC,CAAC;QAClF,CAAC,CACC,eAAe,GAAG,OAAO,gBAAgB,kBAAkB,IAAI,CAAC,SAAS,CACvE,eAAe,CAAC,eAAe,CAChC,EAAE,CACJ,CAAC;QACF,MAAM,UAAU,CAAC,QAAQ,CAAC,GAAG,EAAE,gBAAgB,EAAE,eAAe,CAAC,eAAe,CAAC,CAAC;QAElF,MAAM,gBAAgB,CAAC,eAAe,EAAE,gBAAgB,EAAE,gBAAgB,CAAC,CAAC;QAE5E,OAAO,MAAM,KAAK,CAAC,cAAc,CAAC,GAAG,EAAE,gBAAgB,EAAE,QAAQ,CAAC,CAAC;IACrE,CAAC,CAAC,CAAC;AACL,CAAC;AA1ED,4CA0EC;AAED;;;;;GAKG;AACH,SAAgB,QAAQ,CACtB,OAAe,EACf,OAAwC;IAExC,OAAO,gBAAgB,iCAClB,OAAO,KACV,OAAO,EACP,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,IAAI,EAAE,OAAO,CAAC,IAAI,EAClB,YAAY,EAAE,UAAU,IACxB,CAAC;AACL,CAAC;AAXD,4BAWC"}
|
||||
4
desktop-operator/node_modules/@electron/get/dist/cjs/proxy.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
/**
 * Initializes a third-party proxy module for HTTP(S) requests.
 */
export declare function initializeProxy(): void;
27
desktop-operator/node_modules/@electron/get/dist/cjs/proxy.js
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const debug = require("debug");
|
||||
const utils_1 = require("./utils");
|
||||
const d = debug('@electron/get:proxy');
|
||||
/**
|
||||
* Initializes a third-party proxy module for HTTP(S) requests.
|
||||
*/
|
||||
function initializeProxy() {
|
||||
try {
|
||||
// See: https://github.com/electron/get/pull/214#discussion_r798845713
|
||||
const env = utils_1.getEnv('GLOBAL_AGENT_');
|
||||
utils_1.setEnv('GLOBAL_AGENT_HTTP_PROXY', env('HTTP_PROXY'));
|
||||
utils_1.setEnv('GLOBAL_AGENT_HTTPS_PROXY', env('HTTPS_PROXY'));
|
||||
utils_1.setEnv('GLOBAL_AGENT_NO_PROXY', env('NO_PROXY'));
|
||||
/**
|
||||
* TODO: replace global-agent with a hpagent. @BlackHole1
|
||||
* https://github.com/sindresorhus/got/blob/HEAD/documentation/tips.md#proxying
|
||||
*/
|
||||
require('global-agent').bootstrap();
|
||||
}
|
||||
catch (e) {
|
||||
d('Could not load either proxy modules, built-in proxy support not available:', e);
|
||||
}
|
||||
}
|
||||
exports.initializeProxy = initializeProxy;
|
||||
//# sourceMappingURL=proxy.js.map
1
desktop-operator/node_modules/@electron/get/dist/cjs/proxy.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"proxy.js","sourceRoot":"","sources":["../../src/proxy.ts"],"names":[],"mappings":";;AAAA,+BAA+B;AAC/B,mCAAyC;AAEzC,MAAM,CAAC,GAAG,KAAK,CAAC,qBAAqB,CAAC,CAAC;AAEvC;;GAEG;AACH,SAAgB,eAAe;IAC7B,IAAI;QACF,sEAAsE;QACtE,MAAM,GAAG,GAAG,cAAM,CAAC,eAAe,CAAC,CAAC;QAEpC,cAAM,CAAC,yBAAyB,EAAE,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC;QACrD,cAAM,CAAC,0BAA0B,EAAE,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC;QACvD,cAAM,CAAC,uBAAuB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC;QAEjD;;;WAGG;QACH,OAAO,CAAC,cAAc,CAAC,CAAC,SAAS,EAAE,CAAC;KACrC;IAAC,OAAO,CAAC,EAAE;QACV,CAAC,CAAC,4EAA4E,EAAE,CAAC,CAAC,CAAC;KACpF;AACH,CAAC;AAjBD,0CAiBC"}
|
||||
129
desktop-operator/node_modules/@electron/get/dist/cjs/types.d.ts
generated
vendored
Normal file
@@ -0,0 +1,129 @@
|
||||
import { Downloader } from './Downloader';
|
||||
export declare type DownloadOptions = any;
|
||||
export interface MirrorOptions {
|
||||
/**
|
||||
* DEPRECATED - see nightlyMirror.
|
||||
*/
|
||||
nightly_mirror?: string;
|
||||
/**
|
||||
* The Electron nightly-specific mirror URL.
|
||||
*/
|
||||
nightlyMirror?: string;
|
||||
/**
|
||||
* The base URL of the mirror to download from,
|
||||
* e.g https://github.com/electron/electron/releases/download
|
||||
*/
|
||||
mirror?: string;
|
||||
/**
|
||||
* The name of the directory to download from,
|
||||
* often scoped by version number e.g 'v4.0.4'
|
||||
*/
|
||||
customDir?: string;
|
||||
/**
|
||||
* The name of the asset to download,
|
||||
* e.g 'electron-v4.0.4-linux-x64.zip'
|
||||
*/
|
||||
customFilename?: string;
|
||||
/**
|
||||
* The version of the asset to download,
|
||||
* e.g '4.0.4'
|
||||
*/
|
||||
customVersion?: string;
|
||||
/**
|
||||
* A function allowing customization of the url returned
|
||||
* from getArtifactRemoteURL().
|
||||
*/
|
||||
resolveAssetURL?: (opts: DownloadOptions) => Promise<string>;
|
||||
}
|
||||
export interface ElectronDownloadRequest {
|
||||
/**
|
||||
* The version of Electron associated with the artifact.
|
||||
*/
|
||||
version: string;
|
||||
/**
|
||||
* The type of artifact. For example:
|
||||
* * `electron`
|
||||
* * `ffmpeg`
|
||||
*/
|
||||
artifactName: string;
|
||||
}
|
||||
export interface ElectronDownloadRequestOptions {
|
||||
/**
|
||||
* Whether to download an artifact regardless of whether it's in the cache directory.
|
||||
*
|
||||
* Defaults to `false`.
|
||||
*/
|
||||
force?: boolean;
|
||||
/**
|
||||
* When set to `true`, disables checking that the artifact download completed successfully
|
||||
* with the correct payload.
|
||||
*
|
||||
* Defaults to `false`.
|
||||
*/
|
||||
unsafelyDisableChecksums?: boolean;
|
||||
/**
|
||||
* Provides checksums for the artifact as strings.
|
||||
* Can be used if you already know the checksums of the Electron artifact
|
||||
* you are downloading and want to skip the checksum file download
|
||||
* without skipping the checksum validation.
|
||||
*
|
||||
* This should be an object whose keys are the file names of the artifacts and
|
||||
* the values are their respective SHA256 checksums.
|
||||
*/
|
||||
checksums?: Record<string, string>;
|
||||
/**
|
||||
* The directory that caches Electron artifact downloads.
|
||||
*
|
||||
* The default value is dependent upon the host platform:
|
||||
*
|
||||
* * Linux: `$XDG_CACHE_HOME` or `~/.cache/electron/`
|
||||
* * MacOS: `~/Library/Caches/electron/`
|
||||
* * Windows: `%LOCALAPPDATA%/electron/Cache` or `~/AppData/Local/electron/Cache/`
|
||||
*/
|
||||
cacheRoot?: string;
|
||||
/**
|
||||
* Options passed to the downloader module.
|
||||
*/
|
||||
downloadOptions?: DownloadOptions;
|
||||
/**
|
||||
* Options related to specifying an artifact mirror.
|
||||
*/
|
||||
mirrorOptions?: MirrorOptions;
|
||||
/**
|
||||
* The custom [[Downloader]] class used to download artifacts. Defaults to the
|
||||
* built-in [[GotDownloader]].
|
||||
*/
|
||||
downloader?: Downloader<DownloadOptions>;
|
||||
/**
|
||||
* A temporary directory for downloads.
|
||||
* It is used before artifacts are put into cache.
|
||||
*/
|
||||
tempDirectory?: string;
|
||||
}
|
||||
export declare type ElectronPlatformArtifactDetails = {
|
||||
/**
|
||||
* The target artifact platform. These are Node-style platform names, for example:
|
||||
* * `win32`
|
||||
* * `darwin`
|
||||
* * `linux`
|
||||
*/
|
||||
platform: string;
|
||||
/**
|
||||
* The target artifact architecture. These are Node-style architecture names, for example:
|
||||
* * `ia32`
|
||||
* * `x64`
|
||||
* * `armv7l`
|
||||
*/
|
||||
arch: string;
|
||||
artifactSuffix?: string;
|
||||
isGeneric?: false;
|
||||
} & ElectronDownloadRequest & ElectronDownloadRequestOptions;
|
||||
export declare type ElectronGenericArtifactDetails = {
|
||||
isGeneric: true;
|
||||
} & ElectronDownloadRequest & ElectronDownloadRequestOptions;
|
||||
export declare type ElectronArtifactDetails = ElectronPlatformArtifactDetails | ElectronGenericArtifactDetails;
|
||||
export declare type Omit<T, K> = Pick<T, Exclude<keyof T, K>>;
|
||||
export declare type ElectronPlatformArtifactDetailsWithDefaults = (Omit<ElectronPlatformArtifactDetails, 'platform' | 'arch'> & {
|
||||
platform?: string;
|
||||
arch?: string;
|
||||
}) | ElectronGenericArtifactDetails;
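The `MirrorOptions` fields above feed the URL construction in `artifact-utils` further down in this diff. A hedged sketch of how they might be passed through (the mirror URL and directory template are illustrative placeholders, not values from this commit):

```typescript
import { downloadArtifact } from '@electron/get';

async function fetchFromMirror(): Promise<string> {
  // customDir's "{{ version }}" token is substituted with the version
  // (without the leading "v") when the remote URL is built.
  return downloadArtifact({
    version: '4.0.4',
    artifactName: 'electron',
    mirrorOptions: {
      mirror: 'https://npmmirror.com/mirrors/electron/', // placeholder mirror
      customDir: '{{ version }}',
    },
  });
}
```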
3
desktop-operator/node_modules/@electron/get/dist/cjs/types.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
//# sourceMappingURL=types.js.map
1
desktop-operator/node_modules/@electron/get/dist/cjs/types.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":""}
25
desktop-operator/node_modules/@electron/get/dist/cjs/utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,25 @@
export declare function withTempDirectoryIn<T>(parentDirectory: string | undefined, fn: (directory: string) => Promise<T>): Promise<T>;
|
||||
export declare function withTempDirectory<T>(fn: (directory: string) => Promise<T>): Promise<T>;
|
||||
export declare function normalizeVersion(version: string): string;
|
||||
/**
|
||||
* Runs the `uname` command and returns the trimmed output.
|
||||
*/
|
||||
export declare function uname(): string;
|
||||
/**
|
||||
* Generates an architecture name that would be used in an Electron or Node.js
|
||||
* download file name.
|
||||
*/
|
||||
export declare function getNodeArch(arch: string): string;
|
||||
/**
|
||||
* Generates an architecture name that would be used in an Electron or Node.js
|
||||
* download file name, from the `process` module information.
|
||||
*/
|
||||
export declare function getHostArch(): string;
|
||||
export declare function ensureIsTruthyString<T, K extends keyof T>(obj: T, key: K): void;
|
||||
export declare function isOfficialLinuxIA32Download(platform: string, arch: string, version: string, mirrorOptions?: object): boolean;
|
||||
/**
|
||||
* Find the value of a environment variable which may or may not have the
|
||||
* prefix, in a case-insensitive manner.
|
||||
*/
|
||||
export declare function getEnv(prefix?: string): (name: string) => string | undefined;
|
||||
export declare function setEnv(key: string, value: string | undefined): void;
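`getEnv` returns a lookup function rather than a value: it checks the prefixed name first, then the bare name, both case-insensitively, and `setEnv` skips the assignment entirely when the value is `undefined`. A small behavioural sketch (the relative import path is an assumption; these are internal helpers):

```typescript
import { getEnv, setEnv } from './utils';

const env = getEnv('GLOBAL_AGENT_');

// Returns GLOBAL_AGENT_HTTP_PROXY if set (any casing), otherwise HTTP_PROXY,
// otherwise undefined.
const httpProxy = env('HTTP_PROXY');

// Only assigns when the value is defined, so an unset variable never
// becomes the literal string "undefined".
setEnv('GLOBAL_AGENT_HTTP_PROXY', httpProxy);
```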
107
desktop-operator/node_modules/@electron/get/dist/cjs/utils.js
generated
vendored
Normal file
@@ -0,0 +1,107 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const childProcess = require("child_process");
|
||||
const fs = require("fs-extra");
|
||||
const os = require("os");
|
||||
const path = require("path");
|
||||
async function useAndRemoveDirectory(directory, fn) {
|
||||
let result;
|
||||
try {
|
||||
result = await fn(directory);
|
||||
}
|
||||
finally {
|
||||
await fs.remove(directory);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
async function withTempDirectoryIn(parentDirectory = os.tmpdir(), fn) {
|
||||
const tempDirectoryPrefix = 'electron-download-';
|
||||
const tempDirectory = await fs.mkdtemp(path.resolve(parentDirectory, tempDirectoryPrefix));
|
||||
return useAndRemoveDirectory(tempDirectory, fn);
|
||||
}
|
||||
exports.withTempDirectoryIn = withTempDirectoryIn;
|
||||
async function withTempDirectory(fn) {
|
||||
return withTempDirectoryIn(undefined, fn);
|
||||
}
|
||||
exports.withTempDirectory = withTempDirectory;
|
||||
function normalizeVersion(version) {
|
||||
if (!version.startsWith('v')) {
|
||||
return `v${version}`;
|
||||
}
|
||||
return version;
|
||||
}
|
||||
exports.normalizeVersion = normalizeVersion;
|
||||
/**
|
||||
* Runs the `uname` command and returns the trimmed output.
|
||||
*/
|
||||
function uname() {
|
||||
return childProcess
|
||||
.execSync('uname -m')
|
||||
.toString()
|
||||
.trim();
|
||||
}
|
||||
exports.uname = uname;
|
||||
/**
|
||||
* Generates an architecture name that would be used in an Electron or Node.js
|
||||
* download file name.
|
||||
*/
|
||||
function getNodeArch(arch) {
|
||||
if (arch === 'arm') {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
switch (process.config.variables.arm_version) {
|
||||
case '6':
|
||||
return uname();
|
||||
case '7':
|
||||
default:
|
||||
return 'armv7l';
|
||||
}
|
||||
}
|
||||
return arch;
|
||||
}
|
||||
exports.getNodeArch = getNodeArch;
|
||||
/**
|
||||
* Generates an architecture name that would be used in an Electron or Node.js
|
||||
* download file name, from the `process` module information.
|
||||
*/
|
||||
function getHostArch() {
|
||||
return getNodeArch(process.arch);
|
||||
}
|
||||
exports.getHostArch = getHostArch;
|
||||
function ensureIsTruthyString(obj, key) {
|
||||
if (!obj[key] || typeof obj[key] !== 'string') {
|
||||
throw new Error(`Expected property "${key}" to be provided as a string but it was not`);
|
||||
}
|
||||
}
|
||||
exports.ensureIsTruthyString = ensureIsTruthyString;
|
||||
function isOfficialLinuxIA32Download(platform, arch, version, mirrorOptions) {
|
||||
return (platform === 'linux' &&
|
||||
arch === 'ia32' &&
|
||||
Number(version.slice(1).split('.')[0]) >= 4 &&
|
||||
typeof mirrorOptions === 'undefined');
|
||||
}
|
||||
exports.isOfficialLinuxIA32Download = isOfficialLinuxIA32Download;
|
||||
/**
|
||||
* Find the value of a environment variable which may or may not have the
|
||||
* prefix, in a case-insensitive manner.
|
||||
*/
|
||||
function getEnv(prefix = '') {
|
||||
const envsLowerCase = {};
|
||||
for (const envKey in process.env) {
|
||||
envsLowerCase[envKey.toLowerCase()] = process.env[envKey];
|
||||
}
|
||||
return (name) => {
|
||||
return (envsLowerCase[`${prefix}${name}`.toLowerCase()] ||
|
||||
envsLowerCase[name.toLowerCase()] ||
|
||||
undefined);
|
||||
};
|
||||
}
|
||||
exports.getEnv = getEnv;
|
||||
function setEnv(key, value) {
|
||||
// The `void` operator always returns `undefined`.
|
||||
// See: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/void
|
||||
if (value !== void 0) {
|
||||
process.env[key] = value;
|
||||
}
|
||||
}
|
||||
exports.setEnv = setEnv;
|
||||
//# sourceMappingURL=utils.js.map
1
desktop-operator/node_modules/@electron/get/dist/cjs/utils.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../src/utils.ts"],"names":[],"mappings":";;AAAA,8CAA8C;AAC9C,+BAA+B;AAC/B,yBAAyB;AACzB,6BAA6B;AAE7B,KAAK,UAAU,qBAAqB,CAClC,SAAiB,EACjB,EAAqC;IAErC,IAAI,MAAS,CAAC;IACd,IAAI;QACF,MAAM,GAAG,MAAM,EAAE,CAAC,SAAS,CAAC,CAAC;KAC9B;YAAS;QACR,MAAM,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;KAC5B;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAEM,KAAK,UAAU,mBAAmB,CACvC,kBAA0B,EAAE,CAAC,MAAM,EAAE,EACrC,EAAqC;IAErC,MAAM,mBAAmB,GAAG,oBAAoB,CAAC;IACjD,MAAM,aAAa,GAAG,MAAM,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,eAAe,EAAE,mBAAmB,CAAC,CAAC,CAAC;IAC3F,OAAO,qBAAqB,CAAC,aAAa,EAAE,EAAE,CAAC,CAAC;AAClD,CAAC;AAPD,kDAOC;AAEM,KAAK,UAAU,iBAAiB,CAAI,EAAqC;IAC9E,OAAO,mBAAmB,CAAC,SAAS,EAAE,EAAE,CAAC,CAAC;AAC5C,CAAC;AAFD,8CAEC;AAED,SAAgB,gBAAgB,CAAC,OAAe;IAC9C,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;QAC5B,OAAO,IAAI,OAAO,EAAE,CAAC;KACtB;IACD,OAAO,OAAO,CAAC;AACjB,CAAC;AALD,4CAKC;AAED;;GAEG;AACH,SAAgB,KAAK;IACnB,OAAO,YAAY;SAChB,QAAQ,CAAC,UAAU,CAAC;SACpB,QAAQ,EAAE;SACV,IAAI,EAAE,CAAC;AACZ,CAAC;AALD,sBAKC;AAED;;;GAGG;AACH,SAAgB,WAAW,CAAC,IAAY;IACtC,IAAI,IAAI,KAAK,KAAK,EAAE;QAClB,8DAA8D;QAC9D,QAAS,OAAO,CAAC,MAAM,CAAC,SAAiB,CAAC,WAAW,EAAE;YACrD,KAAK,GAAG;gBACN,OAAO,KAAK,EAAE,CAAC;YACjB,KAAK,GAAG,CAAC;YACT;gBACE,OAAO,QAAQ,CAAC;SACnB;KACF;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAbD,kCAaC;AAED;;;GAGG;AACH,SAAgB,WAAW;IACzB,OAAO,WAAW,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAFD,kCAEC;AAED,SAAgB,oBAAoB,CAAuB,GAAM,EAAE,GAAM;IACvE,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,OAAO,GAAG,CAAC,GAAG,CAAC,KAAK,QAAQ,EAAE;QAC7C,MAAM,IAAI,KAAK,CAAC,sBAAsB,GAAG,6CAA6C,CAAC,CAAC;KACzF;AACH,CAAC;AAJD,oDAIC;AAED,SAAgB,2BAA2B,CACzC,QAAgB,EAChB,IAAY,EACZ,OAAe,EACf,aAAsB;IAEtB,OAAO,CACL,QAAQ,KAAK,OAAO;QACpB,IAAI,KAAK,MAAM;QACf,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;QAC3C,OAAO,aAAa,KAAK,WAAW,CACrC,CAAC;AACJ,CAAC;AAZD,kEAYC;AAED;;;GAGG;AACH,SAAgB,MAAM,CAAC,MAAM,GAAG,EAAE;IAChC,MAAM,aAAa,GAAsB,EAAE,CAAC;IAE5C,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,GAAG,EAAE;QAChC,aAAa,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;KAC3D;IAED,OAAO,CAAC,IAAY,EAAsB,EAAE;QAC1C,OAAO,CACL,aAAa,CAAC,GAAG,MAAM,GAAG,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;YAC/C,aAAa,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC;YACjC,SAAS,CACV,CAAC;IACJ,CAAC,CAAC;AACJ,CAAC;AAdD,wBAcC;AAED,SAAgB,MAAM,CAAC,GAAW,EAAE,KAAyB;IAC3D,kDAAkD;IAClD,wFAAwF;IACxF,IAAI,KAAK,KAAK,KAAK,CAAC,EAAE;QACpB,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;KAC1B;AACH,CAAC;AAND,wBAMC"}
8
desktop-operator/node_modules/@electron/get/dist/esm/Cache.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
export declare class Cache {
    private cacheRoot;
    constructor(cacheRoot?: string);
    static getCacheDirectory(downloadUrl: string): string;
    getCachePath(downloadUrl: string, fileName: string): string;
    getPathForFileInCache(url: string, fileName: string): Promise<string | null>;
    putFileInCache(url: string, currentPath: string, fileName: string): Promise<string>;
}
57
desktop-operator/node_modules/@electron/get/dist/esm/Cache.js
generated
vendored
Normal file
@@ -0,0 +1,57 @@
var __rest = (this && this.__rest) || function (s, e) {
|
||||
var t = {};
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
|
||||
t[p] = s[p];
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function")
|
||||
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
|
||||
t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
import debug from 'debug';
|
||||
import envPaths from 'env-paths';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
import * as url from 'url';
|
||||
import * as crypto from 'crypto';
|
||||
const d = debug('@electron/get:cache');
|
||||
const defaultCacheRoot = envPaths('electron', {
|
||||
suffix: '',
|
||||
}).cache;
|
||||
export class Cache {
|
||||
constructor(cacheRoot = defaultCacheRoot) {
|
||||
this.cacheRoot = cacheRoot;
|
||||
}
|
||||
static getCacheDirectory(downloadUrl) {
|
||||
const parsedDownloadUrl = url.parse(downloadUrl);
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const { search, hash, pathname } = parsedDownloadUrl, rest = __rest(parsedDownloadUrl, ["search", "hash", "pathname"]);
|
||||
const strippedUrl = url.format(Object.assign(Object.assign({}, rest), { pathname: path.dirname(pathname || 'electron') }));
|
||||
return crypto
|
||||
.createHash('sha256')
|
||||
.update(strippedUrl)
|
||||
.digest('hex');
|
||||
}
|
||||
getCachePath(downloadUrl, fileName) {
|
||||
return path.resolve(this.cacheRoot, Cache.getCacheDirectory(downloadUrl), fileName);
|
||||
}
|
||||
async getPathForFileInCache(url, fileName) {
|
||||
const cachePath = this.getCachePath(url, fileName);
|
||||
if (await fs.pathExists(cachePath)) {
|
||||
return cachePath;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
async putFileInCache(url, currentPath, fileName) {
|
||||
const cachePath = this.getCachePath(url, fileName);
|
||||
d(`Moving ${currentPath} to ${cachePath}`);
|
||||
if (await fs.pathExists(cachePath)) {
|
||||
d('* Replacing existing file');
|
||||
await fs.remove(cachePath);
|
||||
}
|
||||
await fs.move(currentPath, cachePath);
|
||||
return cachePath;
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=Cache.js.map
1
desktop-operator/node_modules/@electron/get/dist/esm/Cache.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"Cache.js","sourceRoot":"","sources":["../../src/Cache.ts"],"names":[],"mappings":";;;;;;;;;;;AAAA,OAAO,KAAK,MAAM,OAAO,CAAC;AAC1B,OAAO,QAAQ,MAAM,WAAW,CAAC;AACjC,OAAO,KAAK,EAAE,MAAM,UAAU,CAAC;AAC/B,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,GAAG,MAAM,KAAK,CAAC;AAC3B,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AAEjC,MAAM,CAAC,GAAG,KAAK,CAAC,qBAAqB,CAAC,CAAC;AAEvC,MAAM,gBAAgB,GAAG,QAAQ,CAAC,UAAU,EAAE;IAC5C,MAAM,EAAE,EAAE;CACX,CAAC,CAAC,KAAK,CAAC;AAET,MAAM,OAAO,KAAK;IAChB,YAAoB,YAAY,gBAAgB;QAA5B,cAAS,GAAT,SAAS,CAAmB;IAAG,CAAC;IAE7C,MAAM,CAAC,iBAAiB,CAAC,WAAmB;QACjD,MAAM,iBAAiB,GAAG,GAAG,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC;QACjD,6DAA6D;QAC7D,MAAM,EAAE,MAAM,EAAE,IAAI,EAAE,QAAQ,KAAc,iBAAiB,EAA7B,gEAA6B,CAAC;QAC9D,MAAM,WAAW,GAAG,GAAG,CAAC,MAAM,iCAAM,IAAI,KAAE,QAAQ,EAAE,IAAI,CAAC,OAAO,CAAC,QAAQ,IAAI,UAAU,CAAC,IAAG,CAAC;QAE5F,OAAO,MAAM;aACV,UAAU,CAAC,QAAQ,CAAC;aACpB,MAAM,CAAC,WAAW,CAAC;aACnB,MAAM,CAAC,KAAK,CAAC,CAAC;IACnB,CAAC;IAEM,YAAY,CAAC,WAAmB,EAAE,QAAgB;QACvD,OAAO,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,SAAS,EAAE,KAAK,CAAC,iBAAiB,CAAC,WAAW,CAAC,EAAE,QAAQ,CAAC,CAAC;IACtF,CAAC;IAEM,KAAK,CAAC,qBAAqB,CAAC,GAAW,EAAE,QAAgB;QAC9D,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACnD,IAAI,MAAM,EAAE,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YAClC,OAAO,SAAS,CAAC;SAClB;QAED,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,KAAK,CAAC,cAAc,CAAC,GAAW,EAAE,WAAmB,EAAE,QAAgB;QAC5E,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QACnD,CAAC,CAAC,UAAU,WAAW,OAAO,SAAS,EAAE,CAAC,CAAC;QAC3C,IAAI,MAAM,EAAE,CAAC,UAAU,CAAC,SAAS,CAAC,EAAE;YAClC,CAAC,CAAC,2BAA2B,CAAC,CAAC;YAC/B,MAAM,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;SAC5B;QAED,MAAM,EAAE,CAAC,IAAI,CAAC,WAAW,EAAE,SAAS,CAAC,CAAC;QAEtC,OAAO,SAAS,CAAC;IACnB,CAAC;CACF"}
3
desktop-operator/node_modules/@electron/get/dist/esm/Downloader.d.ts
generated
vendored
Normal file
@@ -0,0 +1,3 @@
export interface Downloader<T> {
    download(url: string, targetFilePath: string, options: T): Promise<void>;
}
1
desktop-operator/node_modules/@electron/get/dist/esm/Downloader.js
generated
vendored
Normal file
@@ -0,0 +1 @@
//# sourceMappingURL=Downloader.js.map
1
desktop-operator/node_modules/@electron/get/dist/esm/Downloader.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"Downloader.js","sourceRoot":"","sources":["../../src/Downloader.ts"],"names":[],"mappings":""}
21
desktop-operator/node_modules/@electron/get/dist/esm/GotDownloader.d.ts
generated
vendored
Normal file
@@ -0,0 +1,21 @@
import { Progress as GotProgress, Options as GotOptions } from 'got';
|
||||
import { Downloader } from './Downloader';
|
||||
/**
|
||||
* See [`got#options`](https://github.com/sindresorhus/got#options) for possible keys/values.
|
||||
*/
|
||||
export declare type GotDownloaderOptions = (GotOptions & {
|
||||
isStream?: true;
|
||||
}) & {
|
||||
/**
|
||||
* if defined, triggers every time `got`'s `downloadProgress` event callback is triggered.
|
||||
*/
|
||||
getProgressCallback?: (progress: GotProgress) => Promise<void>;
|
||||
/**
|
||||
* if `true`, disables the console progress bar (setting the `ELECTRON_GET_NO_PROGRESS`
|
||||
* environment variable to a non-empty value also does this).
|
||||
*/
|
||||
quiet?: boolean;
|
||||
};
|
||||
export declare class GotDownloader implements Downloader<GotDownloaderOptions> {
|
||||
download(url: string, targetFilePath: string, options?: GotDownloaderOptions): Promise<void>;
|
||||
}
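`GotDownloaderOptions` extends got's own options with the two extra fields documented above. A rough sketch of how a caller might drive a download with them (the URL and target path are placeholders; normally the downloader is resolved internally rather than instantiated directly):

```typescript
import { GotDownloader } from './GotDownloader';

async function fetchZip(): Promise<void> {
  const downloader = new GotDownloader();
  await downloader.download(
    'https://github.com/electron/electron/releases/download/v4.0.4/electron-v4.0.4-linux-x64.zip',
    '/tmp/electron-v4.0.4-linux-x64.zip',
    {
      quiet: true, // suppress the console progress bar
      getProgressCallback: async (progress) => {
        // progress.percent is a fraction between 0 and 1
        console.log(`downloaded ${(progress.percent * 100).toFixed(1)}%`);
      },
    },
  );
}
```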
73
desktop-operator/node_modules/@electron/get/dist/esm/GotDownloader.js
generated
vendored
Normal file
@@ -0,0 +1,73 @@
var __rest = (this && this.__rest) || function (s, e) {
|
||||
var t = {};
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
|
||||
t[p] = s[p];
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function")
|
||||
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
|
||||
t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
import * as fs from 'fs-extra';
|
||||
import got, { HTTPError } from 'got';
|
||||
import * as path from 'path';
|
||||
import * as ProgressBar from 'progress';
|
||||
const PROGRESS_BAR_DELAY_IN_SECONDS = 30;
|
||||
export class GotDownloader {
|
||||
async download(url, targetFilePath, options) {
|
||||
if (!options) {
|
||||
options = {};
|
||||
}
|
||||
const { quiet, getProgressCallback } = options, gotOptions = __rest(options, ["quiet", "getProgressCallback"]);
|
||||
let downloadCompleted = false;
|
||||
let bar;
|
||||
let progressPercent;
|
||||
let timeout = undefined;
|
||||
await fs.mkdirp(path.dirname(targetFilePath));
|
||||
const writeStream = fs.createWriteStream(targetFilePath);
|
||||
if (!quiet || !process.env.ELECTRON_GET_NO_PROGRESS) {
|
||||
const start = new Date();
|
||||
timeout = setTimeout(() => {
|
||||
if (!downloadCompleted) {
|
||||
bar = new ProgressBar(`Downloading ${path.basename(url)}: [:bar] :percent ETA: :eta seconds `, {
|
||||
curr: progressPercent,
|
||||
total: 100,
|
||||
});
|
||||
// https://github.com/visionmedia/node-progress/issues/159
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
bar.start = start;
|
||||
}
|
||||
}, PROGRESS_BAR_DELAY_IN_SECONDS * 1000);
|
||||
}
|
||||
await new Promise((resolve, reject) => {
|
||||
const downloadStream = got.stream(url, gotOptions);
|
||||
downloadStream.on('downloadProgress', async (progress) => {
|
||||
progressPercent = progress.percent;
|
||||
if (bar) {
|
||||
bar.update(progress.percent);
|
||||
}
|
||||
if (getProgressCallback) {
|
||||
await getProgressCallback(progress);
|
||||
}
|
||||
});
|
||||
downloadStream.on('error', error => {
|
||||
if (error instanceof HTTPError && error.response.statusCode === 404) {
|
||||
error.message += ` for ${error.response.url}`;
|
||||
}
|
||||
if (writeStream.destroy) {
|
||||
writeStream.destroy(error);
|
||||
}
|
||||
reject(error);
|
||||
});
|
||||
writeStream.on('error', error => reject(error));
|
||||
writeStream.on('close', () => resolve());
|
||||
downloadStream.pipe(writeStream);
|
||||
});
|
||||
downloadCompleted = true;
|
||||
if (timeout) {
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=GotDownloader.js.map
1
desktop-operator/node_modules/@electron/get/dist/esm/GotDownloader.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"GotDownloader.js","sourceRoot":"","sources":["../../src/GotDownloader.ts"],"names":[],"mappings":";;;;;;;;;;;AAAA,OAAO,KAAK,EAAE,MAAM,UAAU,CAAC;AAC/B,OAAO,GAAG,EAAE,EAAE,SAAS,EAAkD,MAAM,KAAK,CAAC;AACrF,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,WAAW,MAAM,UAAU,CAAC;AAIxC,MAAM,6BAA6B,GAAG,EAAE,CAAC;AAiBzC,MAAM,OAAO,aAAa;IACxB,KAAK,CAAC,QAAQ,CACZ,GAAW,EACX,cAAsB,EACtB,OAA8B;QAE9B,IAAI,CAAC,OAAO,EAAE;YACZ,OAAO,GAAG,EAAE,CAAC;SACd;QACD,MAAM,EAAE,KAAK,EAAE,mBAAmB,KAAoB,OAAO,EAAzB,8DAAyB,CAAC;QAC9D,IAAI,iBAAiB,GAAG,KAAK,CAAC;QAC9B,IAAI,GAA4B,CAAC;QACjC,IAAI,eAAuB,CAAC;QAC5B,IAAI,OAAO,GAA+B,SAAS,CAAC;QACpD,MAAM,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC;QAC9C,MAAM,WAAW,GAAG,EAAE,CAAC,iBAAiB,CAAC,cAAc,CAAC,CAAC;QAEzD,IAAI,CAAC,KAAK,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,wBAAwB,EAAE;YACnD,MAAM,KAAK,GAAG,IAAI,IAAI,EAAE,CAAC;YACzB,OAAO,GAAG,UAAU,CAAC,GAAG,EAAE;gBACxB,IAAI,CAAC,iBAAiB,EAAE;oBACtB,GAAG,GAAG,IAAI,WAAW,CACnB,eAAe,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,sCAAsC,EACvE;wBACE,IAAI,EAAE,eAAe;wBACrB,KAAK,EAAE,GAAG;qBACX,CACF,CAAC;oBACF,0DAA0D;oBAC1D,8DAA8D;oBAC7D,GAAW,CAAC,KAAK,GAAG,KAAK,CAAC;iBAC5B;YACH,CAAC,EAAE,6BAA6B,GAAG,IAAI,CAAC,CAAC;SAC1C;QACD,MAAM,IAAI,OAAO,CAAO,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC1C,MAAM,cAAc,GAAG,GAAG,CAAC,MAAM,CAAC,GAAG,EAAE,UAAU,CAAC,CAAC;YACnD,cAAc,CAAC,EAAE,CAAC,kBAAkB,EAAE,KAAK,EAAC,QAAQ,EAAC,EAAE;gBACrD,eAAe,GAAG,QAAQ,CAAC,OAAO,CAAC;gBACnC,IAAI,GAAG,EAAE;oBACP,GAAG,CAAC,MAAM,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAC;iBAC9B;gBACD,IAAI,mBAAmB,EAAE;oBACvB,MAAM,mBAAmB,CAAC,QAAQ,CAAC,CAAC;iBACrC;YACH,CAAC,CAAC,CAAC;YACH,cAAc,CAAC,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE;gBACjC,IAAI,KAAK,YAAY,SAAS,IAAI,KAAK,CAAC,QAAQ,CAAC,UAAU,KAAK,GAAG,EAAE;oBACnE,KAAK,CAAC,OAAO,IAAI,QAAQ,KAAK,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC;iBAC/C;gBACD,IAAI,WAAW,CAAC,OAAO,EAAE;oBACvB,WAAW,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC;iBAC5B;gBAED,MAAM,CAAC,KAAK,CAAC,CAAC;YAChB,CAAC,CAAC,CAAC;YACH,WAAW,CAAC,EAAE,CAAC,OAAO,EAAE,KAAK,CAAC,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC;YAChD,WAAW,CAAC,EAAE,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC;YAEzC,cAAc,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC;QACnC,CAAC,CAAC,CAAC;QAEH,iBAAiB,GAAG,IAAI,CAAC;QACzB,IAAI,OAAO,EAAE;YACX,YAAY,CAAC,OAAO,CAAC,CAAC;SACvB;IACH,CAAC;CACF"}
4
desktop-operator/node_modules/@electron/get/dist/esm/artifact-utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
import { ElectronArtifactDetails } from './types';
export declare function getArtifactFileName(details: ElectronArtifactDetails): string;
export declare function getArtifactRemoteURL(details: ElectronArtifactDetails): Promise<string>;
export declare function getArtifactVersion(details: ElectronArtifactDetails): string;
61
desktop-operator/node_modules/@electron/get/dist/esm/artifact-utils.js
generated
vendored
Normal file
@@ -0,0 +1,61 @@
import { ensureIsTruthyString, normalizeVersion } from './utils';
|
||||
const BASE_URL = 'https://github.com/electron/electron/releases/download/';
|
||||
const NIGHTLY_BASE_URL = 'https://github.com/electron/nightlies/releases/download/';
|
||||
export function getArtifactFileName(details) {
|
||||
ensureIsTruthyString(details, 'artifactName');
|
||||
if (details.isGeneric) {
|
||||
return details.artifactName;
|
||||
}
|
||||
ensureIsTruthyString(details, 'arch');
|
||||
ensureIsTruthyString(details, 'platform');
|
||||
ensureIsTruthyString(details, 'version');
|
||||
return `${[
|
||||
details.artifactName,
|
||||
details.version,
|
||||
details.platform,
|
||||
details.arch,
|
||||
...(details.artifactSuffix ? [details.artifactSuffix] : []),
|
||||
].join('-')}.zip`;
|
||||
}
|
||||
function mirrorVar(name, options, defaultValue) {
|
||||
// Convert camelCase to camel_case for env var reading
|
||||
const snakeName = name.replace(/([a-z])([A-Z])/g, (_, a, b) => `${a}_${b}`).toLowerCase();
|
||||
return (
|
||||
// .npmrc
|
||||
process.env[`npm_config_electron_${name.toLowerCase()}`] ||
|
||||
process.env[`NPM_CONFIG_ELECTRON_${snakeName.toUpperCase()}`] ||
|
||||
process.env[`npm_config_electron_${snakeName}`] ||
|
||||
// package.json
|
||||
process.env[`npm_package_config_electron_${name}`] ||
|
||||
process.env[`npm_package_config_electron_${snakeName.toLowerCase()}`] ||
|
||||
// env
|
||||
process.env[`ELECTRON_${snakeName.toUpperCase()}`] ||
|
||||
options[name] ||
|
||||
defaultValue);
|
||||
}
|
||||
export async function getArtifactRemoteURL(details) {
|
||||
const opts = details.mirrorOptions || {};
|
||||
let base = mirrorVar('mirror', opts, BASE_URL);
|
||||
if (details.version.includes('nightly')) {
|
||||
const nightlyDeprecated = mirrorVar('nightly_mirror', opts, '');
|
||||
if (nightlyDeprecated) {
|
||||
base = nightlyDeprecated;
|
||||
console.warn(`nightly_mirror is deprecated, please use nightlyMirror`);
|
||||
}
|
||||
else {
|
||||
base = mirrorVar('nightlyMirror', opts, NIGHTLY_BASE_URL);
|
||||
}
|
||||
}
|
||||
const path = mirrorVar('customDir', opts, details.version).replace('{{ version }}', details.version.replace(/^v/, ''));
|
||||
const file = mirrorVar('customFilename', opts, getArtifactFileName(details));
|
||||
// Allow customized download URL resolution.
|
||||
if (opts.resolveAssetURL) {
|
||||
const url = await opts.resolveAssetURL(details);
|
||||
return url;
|
||||
}
|
||||
return `${base}${path}/${file}`;
|
||||
}
|
||||
export function getArtifactVersion(details) {
|
||||
return normalizeVersion(mirrorVar('customVersion', details.mirrorOptions || {}, details.version));
|
||||
}
|
||||
//# sourceMappingURL=artifact-utils.js.map
1
desktop-operator/node_modules/@electron/get/dist/esm/artifact-utils.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"artifact-utils.js","sourceRoot":"","sources":["../../src/artifact-utils.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,oBAAoB,EAAE,gBAAgB,EAAE,MAAM,SAAS,CAAC;AAEjE,MAAM,QAAQ,GAAG,yDAAyD,CAAC;AAC3E,MAAM,gBAAgB,GAAG,0DAA0D,CAAC;AAEpF,MAAM,UAAU,mBAAmB,CAAC,OAAgC;IAClE,oBAAoB,CAAC,OAAO,EAAE,cAAc,CAAC,CAAC;IAE9C,IAAI,OAAO,CAAC,SAAS,EAAE;QACrB,OAAO,OAAO,CAAC,YAAY,CAAC;KAC7B;IAED,oBAAoB,CAAC,OAAO,EAAE,MAAM,CAAC,CAAC;IACtC,oBAAoB,CAAC,OAAO,EAAE,UAAU,CAAC,CAAC;IAC1C,oBAAoB,CAAC,OAAO,EAAE,SAAS,CAAC,CAAC;IAEzC,OAAO,GAAG;QACR,OAAO,CAAC,YAAY;QACpB,OAAO,CAAC,OAAO;QACf,OAAO,CAAC,QAAQ;QAChB,OAAO,CAAC,IAAI;QACZ,GAAG,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;KAC5D,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC;AACpB,CAAC;AAED,SAAS,SAAS,CAChB,IAAkD,EAClD,OAAsB,EACtB,YAAoB;IAEpB,sDAAsD;IACtD,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,WAAW,EAAE,CAAC;IAE1F,OAAO;IACL,SAAS;IACT,OAAO,CAAC,GAAG,CAAC,uBAAuB,IAAI,CAAC,WAAW,EAAE,EAAE,CAAC;QACxD,OAAO,CAAC,GAAG,CAAC,uBAAuB,SAAS,CAAC,WAAW,EAAE,EAAE,CAAC;QAC7D,OAAO,CAAC,GAAG,CAAC,uBAAuB,SAAS,EAAE,CAAC;QAC/C,eAAe;QACf,OAAO,CAAC,GAAG,CAAC,+BAA+B,IAAI,EAAE,CAAC;QAClD,OAAO,CAAC,GAAG,CAAC,+BAA+B,SAAS,CAAC,WAAW,EAAE,EAAE,CAAC;QACrE,MAAM;QACN,OAAO,CAAC,GAAG,CAAC,YAAY,SAAS,CAAC,WAAW,EAAE,EAAE,CAAC;QAClD,OAAO,CAAC,IAAI,CAAC;QACb,YAAY,CACb,CAAC;AACJ,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,oBAAoB,CAAC,OAAgC;IACzE,MAAM,IAAI,GAAkB,OAAO,CAAC,aAAa,IAAI,EAAE,CAAC;IACxD,IAAI,IAAI,GAAG,SAAS,CAAC,QAAQ,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC;IAC/C,IAAI,OAAO,CAAC,OAAO,CAAC,QAAQ,CAAC,SAAS,CAAC,EAAE;QACvC,MAAM,iBAAiB,GAAG,SAAS,CAAC,gBAAgB,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC;QAChE,IAAI,iBAAiB,EAAE;YACrB,IAAI,GAAG,iBAAiB,CAAC;YACzB,OAAO,CAAC,IAAI,CAAC,wDAAwD,CAAC,CAAC;SACxE;aAAM;YACL,IAAI,GAAG,SAAS,CAAC,eAAe,EAAE,IAAI,EAAE,gBAAgB,CAAC,CAAC;SAC3D;KACF;IACD,MAAM,IAAI,GAAG,SAAS,CAAC,WAAW,EAAE,IAAI,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC,OAAO,CAChE,eAAe,EACf,OAAO,CAAC,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAClC,CAAC;IACF,MAAM,IAAI,GAAG,SAAS,CAAC,gBAAgB,EAAE,IAAI,EAAE,mBAAmB,CAAC,OAAO,CAAC,CAAC,CAAC;IAE7E,4CAA4C;IAC5C,IAAI,IAAI,CAAC,eAAe,EAAE;QACxB,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,eAAe,CAAC,OAAO,CAAC,CAAC;QAChD,OAAO,GAAG,CAAC;KACZ;IAED,OAAO,GAAG,IAAI,GAAG,IAAI,IAAI,IAAI,EAAE,CAAC;AAClC,CAAC;AAED,MAAM,UAAU,kBAAkB,CAAC,OAAgC;IACjE,OAAO,gBAAgB,CAAC,SAAS,CAAC,eAAe,EAAE,OAAO,CAAC,aAAa,IAAI,EAAE,EAAE,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC;AACpG,CAAC"}
3
desktop-operator/node_modules/@electron/get/dist/esm/downloader-resolver.d.ts
generated
vendored
Normal file
@@ -0,0 +1,3 @@
import { DownloadOptions } from './types';
import { Downloader } from './Downloader';
export declare function getDownloaderForSystem(): Promise<Downloader<DownloadOptions>>;
9
desktop-operator/node_modules/@electron/get/dist/esm/downloader-resolver.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
export async function getDownloaderForSystem() {
    // TODO: Resolve the downloader or default to GotDownloader
    // Current thoughts are a dot-file traversal for something like
    // ".electron.downloader" which would be a text file with the name of the
    // npm module to import() and use as the downloader
    const { GotDownloader } = await import('./GotDownloader');
    return new GotDownloader();
}
//# sourceMappingURL=downloader-resolver.js.map
1
desktop-operator/node_modules/@electron/get/dist/esm/downloader-resolver.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"downloader-resolver.js","sourceRoot":"","sources":["../../src/downloader-resolver.ts"],"names":[],"mappings":"AAGA,MAAM,CAAC,KAAK,UAAU,sBAAsB;IAC1C,2DAA2D;IAC3D,+DAA+D;IAC/D,yEAAyE;IACzE,mDAAmD;IACnD,MAAM,EAAE,aAAa,EAAE,GAAG,MAAM,MAAM,CAAC,iBAAiB,CAAC,CAAC;IAC1D,OAAO,IAAI,aAAa,EAAE,CAAC;AAC7B,CAAC"}
18
desktop-operator/node_modules/@electron/get/dist/esm/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,18 @@
import { ElectronDownloadRequestOptions, ElectronPlatformArtifactDetailsWithDefaults } from './types';
export { getHostArch } from './utils';
export { initializeProxy } from './proxy';
export * from './types';
/**
 * Downloads an artifact from an Electron release and returns an absolute path
 * to the downloaded file.
 *
 * @param artifactDetails - The information required to download the artifact
 */
export declare function downloadArtifact(_artifactDetails: ElectronPlatformArtifactDetailsWithDefaults): Promise<string>;
/**
 * Downloads a specific version of Electron and returns an absolute path to a
 * ZIP file.
 *
 * @param version - The version of Electron you want to download
 */
export declare function download(version: string, options?: ElectronDownloadRequestOptions): Promise<string>;
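These two declarations are the package's public entry points; `download` is `downloadArtifact` pre-filled with the host platform/arch and `artifactName: 'electron'` (see the `index.js` implementation that follows). A minimal usage sketch, assuming the module is consumed as `@electron/get`:

```typescript
import { download, downloadArtifact } from '@electron/get';

async function main(): Promise<void> {
  // Resolves to an absolute path of the cached Electron ZIP.
  const electronZip = await download('4.0.4');

  // Platform and arch default to the host values when omitted.
  const ffmpegZip = await downloadArtifact({
    version: '4.0.4',
    artifactName: 'ffmpeg',
  });

  console.log(electronZip, ffmpegZip);
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
```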
134
desktop-operator/node_modules/@electron/get/dist/esm/index.js
generated
vendored
Normal file
@@ -0,0 +1,134 @@
import debug from 'debug';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
import * as semver from 'semver';
|
||||
import * as sumchecker from 'sumchecker';
|
||||
import { getArtifactFileName, getArtifactRemoteURL, getArtifactVersion } from './artifact-utils';
|
||||
import { Cache } from './Cache';
|
||||
import { getDownloaderForSystem } from './downloader-resolver';
|
||||
import { initializeProxy } from './proxy';
|
||||
import { withTempDirectoryIn, getHostArch, getNodeArch, ensureIsTruthyString, isOfficialLinuxIA32Download, } from './utils';
|
||||
export { getHostArch } from './utils';
|
||||
export { initializeProxy } from './proxy';
|
||||
const d = debug('@electron/get:index');
|
||||
if (process.env.ELECTRON_GET_USE_PROXY) {
|
||||
initializeProxy();
|
||||
}
|
||||
async function validateArtifact(artifactDetails, downloadedAssetPath, _downloadArtifact) {
|
||||
return await withTempDirectoryIn(artifactDetails.tempDirectory, async (tempFolder) => {
|
||||
// Don't try to verify the hash of the hash file itself
|
||||
// and for older versions that don't have a SHASUMS256.txt
|
||||
if (!artifactDetails.artifactName.startsWith('SHASUMS256') &&
|
||||
!artifactDetails.unsafelyDisableChecksums &&
|
||||
semver.gte(artifactDetails.version, '1.3.2')) {
|
||||
let shasumPath;
|
||||
const checksums = artifactDetails.checksums;
|
||||
if (checksums) {
|
||||
shasumPath = path.resolve(tempFolder, 'SHASUMS256.txt');
|
||||
const fileNames = Object.keys(checksums);
|
||||
if (fileNames.length === 0) {
|
||||
throw new Error('Provided "checksums" object is empty, cannot generate a valid SHASUMS256.txt');
|
||||
}
|
||||
const generatedChecksums = fileNames
|
||||
.map(fileName => `${checksums[fileName]} *${fileName}`)
|
||||
.join('\n');
|
||||
await fs.writeFile(shasumPath, generatedChecksums);
|
||||
}
|
||||
else {
|
||||
shasumPath = await _downloadArtifact({
|
||||
isGeneric: true,
|
||||
version: artifactDetails.version,
|
||||
artifactName: 'SHASUMS256.txt',
|
||||
force: artifactDetails.force,
|
||||
downloadOptions: artifactDetails.downloadOptions,
|
||||
cacheRoot: artifactDetails.cacheRoot,
|
||||
downloader: artifactDetails.downloader,
|
||||
mirrorOptions: artifactDetails.mirrorOptions,
|
||||
});
|
||||
}
|
||||
// For versions 1.3.2 - 1.3.4, need to overwrite the `defaultTextEncoding` option:
|
||||
// https://github.com/electron/electron/pull/6676#discussion_r75332120
|
||||
if (semver.satisfies(artifactDetails.version, '1.3.2 - 1.3.4')) {
|
||||
const validatorOptions = {};
|
||||
validatorOptions.defaultTextEncoding = 'binary';
|
||||
const checker = new sumchecker.ChecksumValidator('sha256', shasumPath, validatorOptions);
|
||||
await checker.validate(path.dirname(downloadedAssetPath), path.basename(downloadedAssetPath));
|
||||
}
|
||||
else {
|
||||
await sumchecker('sha256', shasumPath, path.dirname(downloadedAssetPath), [
|
||||
path.basename(downloadedAssetPath),
|
||||
]);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Downloads an artifact from an Electron release and returns an absolute path
|
||||
* to the downloaded file.
|
||||
*
|
||||
* @param artifactDetails - The information required to download the artifact
|
||||
*/
|
||||
export async function downloadArtifact(_artifactDetails) {
|
||||
const artifactDetails = Object.assign({}, _artifactDetails);
|
||||
if (!_artifactDetails.isGeneric) {
|
||||
const platformArtifactDetails = artifactDetails;
|
||||
if (!platformArtifactDetails.platform) {
|
||||
d('No platform found, defaulting to the host platform');
|
||||
platformArtifactDetails.platform = process.platform;
|
||||
}
|
||||
if (platformArtifactDetails.arch) {
|
||||
platformArtifactDetails.arch = getNodeArch(platformArtifactDetails.arch);
|
||||
}
|
||||
else {
|
||||
d('No arch found, defaulting to the host arch');
|
||||
platformArtifactDetails.arch = getHostArch();
|
||||
}
|
||||
}
|
||||
ensureIsTruthyString(artifactDetails, 'version');
|
||||
artifactDetails.version = getArtifactVersion(artifactDetails);
|
||||
const fileName = getArtifactFileName(artifactDetails);
|
||||
const url = await getArtifactRemoteURL(artifactDetails);
|
||||
const cache = new Cache(artifactDetails.cacheRoot);
|
||||
// Do not check if the file exists in the cache when force === true
|
||||
if (!artifactDetails.force) {
|
||||
d(`Checking the cache (${artifactDetails.cacheRoot}) for ${fileName} (${url})`);
|
||||
const cachedPath = await cache.getPathForFileInCache(url, fileName);
|
||||
if (cachedPath === null) {
|
||||
d('Cache miss');
|
||||
}
|
||||
else {
|
||||
d('Cache hit');
|
||||
try {
|
||||
await validateArtifact(artifactDetails, cachedPath, downloadArtifact);
|
||||
return cachedPath;
|
||||
}
|
||||
catch (err) {
|
||||
d("Artifact in cache didn't match checksums", err);
|
||||
d('falling back to re-download');
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!artifactDetails.isGeneric &&
|
||||
isOfficialLinuxIA32Download(artifactDetails.platform, artifactDetails.arch, artifactDetails.version, artifactDetails.mirrorOptions)) {
|
||||
console.warn('Official Linux/ia32 support is deprecated.');
|
||||
console.warn('For more info: https://electronjs.org/blog/linux-32bit-support');
|
||||
}
|
||||
return await withTempDirectoryIn(artifactDetails.tempDirectory, async (tempFolder) => {
|
||||
const tempDownloadPath = path.resolve(tempFolder, getArtifactFileName(artifactDetails));
|
||||
const downloader = artifactDetails.downloader || (await getDownloaderForSystem());
|
||||
d(`Downloading ${url} to ${tempDownloadPath} with options: ${JSON.stringify(artifactDetails.downloadOptions)}`);
|
||||
await downloader.download(url, tempDownloadPath, artifactDetails.downloadOptions);
|
||||
await validateArtifact(artifactDetails, tempDownloadPath, downloadArtifact);
|
||||
return await cache.putFileInCache(url, tempDownloadPath, fileName);
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Downloads a specific version of Electron and returns an absolute path to a
|
||||
* ZIP file.
|
||||
*
|
||||
* @param version - The version of Electron you want to download
|
||||
*/
|
||||
export function download(version, options) {
|
||||
return downloadArtifact(Object.assign(Object.assign({}, options), { version, platform: process.platform, arch: process.arch, artifactName: 'electron' }));
|
||||
}
|
||||
//# sourceMappingURL=index.js.map
1
desktop-operator/node_modules/@electron/get/dist/esm/index.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,OAAO,CAAC;AAC1B,OAAO,KAAK,EAAE,MAAM,UAAU,CAAC;AAC/B,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAC7B,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,OAAO,KAAK,UAAU,MAAM,YAAY,CAAC;AAEzC,OAAO,EAAE,mBAAmB,EAAE,oBAAoB,EAAE,kBAAkB,EAAE,MAAM,kBAAkB,CAAC;AAOjG,OAAO,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAChC,OAAO,EAAE,sBAAsB,EAAE,MAAM,uBAAuB,CAAC;AAC/D,OAAO,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAC1C,OAAO,EACL,mBAAmB,EACnB,WAAW,EACX,WAAW,EACX,oBAAoB,EACpB,2BAA2B,GAE5B,MAAM,SAAS,CAAC;AAEjB,OAAO,EAAE,WAAW,EAAE,MAAM,SAAS,CAAC;AACtC,OAAO,EAAE,eAAe,EAAE,MAAM,SAAS,CAAC;AAG1C,MAAM,CAAC,GAAG,KAAK,CAAC,qBAAqB,CAAC,CAAC;AAEvC,IAAI,OAAO,CAAC,GAAG,CAAC,sBAAsB,EAAE;IACtC,eAAe,EAAE,CAAC;CACnB;AAMD,KAAK,UAAU,gBAAgB,CAC7B,eAAwC,EACxC,mBAA2B,EAC3B,iBAAqC;IAErC,OAAO,MAAM,mBAAmB,CAAC,eAAe,CAAC,aAAa,EAAE,KAAK,EAAC,UAAU,EAAC,EAAE;QACjF,uDAAuD;QACvD,0DAA0D;QAC1D,IACE,CAAC,eAAe,CAAC,YAAY,CAAC,UAAU,CAAC,YAAY,CAAC;YACtD,CAAC,eAAe,CAAC,wBAAwB;YACzC,MAAM,CAAC,GAAG,CAAC,eAAe,CAAC,OAAO,EAAE,OAAO,CAAC,EAC5C;YACA,IAAI,UAAkB,CAAC;YACvB,MAAM,SAAS,GAAG,eAAe,CAAC,SAAS,CAAC;YAC5C,IAAI,SAAS,EAAE;gBACb,UAAU,GAAG,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBACxD,MAAM,SAAS,GAAa,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;gBACnD,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;oBAC1B,MAAM,IAAI,KAAK,CACb,8EAA8E,CAC/E,CAAC;iBACH;gBACD,MAAM,kBAAkB,GAAG,SAAS;qBACjC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,QAAQ,CAAC,KAAK,QAAQ,EAAE,CAAC;qBACtD,IAAI,CAAC,IAAI,CAAC,CAAC;gBACd,MAAM,EAAE,CAAC,SAAS,CAAC,UAAU,EAAE,kBAAkB,CAAC,CAAC;aACpD;iBAAM;gBACL,UAAU,GAAG,MAAM,iBAAiB,CAAC;oBACnC,SAAS,EAAE,IAAI;oBACf,OAAO,EAAE,eAAe,CAAC,OAAO;oBAChC,YAAY,EAAE,gBAAgB;oBAC9B,KAAK,EAAE,eAAe,CAAC,KAAK;oBAC5B,eAAe,EAAE,eAAe,CAAC,eAAe;oBAChD,SAAS,EAAE,eAAe,CAAC,SAAS;oBACpC,UAAU,EAAE,eAAe,CAAC,UAAU;oBACtC,aAAa,EAAE,eAAe,CAAC,aAAa;iBAC7C,CAAC,CAAC;aACJ;YAED,kFAAkF;YAClF,sEAAsE;YACtE,IAAI,MAAM,CAAC,SAAS,CAAC,eAAe,CAAC,OAAO,EAAE,eAAe,CAAC,EAAE;gBAC9D,MAAM,gBAAgB,GAA+B,EAAE,CAAC;gBACxD,gBAAgB,CAAC,mBAAmB,GAAG,QAAQ,CAAC;gBAChD,MAAM,OAAO,GAAG,IAAI,UAAU,CAAC,iBAAiB,CAAC,QAAQ,EAAE,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBACzF,MAAM,OAAO,CAAC,QAAQ,CACpB,IAAI,CAAC,OAAO,CAAC,mBAAmB,CAAC,EACjC,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC,CACnC,CAAC;aACH;iBAAM;gBACL,MAAM,UAAU,CAAC,QAAQ,EAAE,UAAU,EAAE,IAAI,CAAC,OAAO,CAAC,mBAAmB,CAAC,EAAE;oBACxE,IAAI,CAAC,QAAQ,CAAC,mBAAmB,CAAC;iBACnC,CAAC,CAAC;aACJ;SACF;IACH,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;GAKG;AACH,MAAM,CAAC,KAAK,UAAU,gBAAgB,CACpC,gBAA6D;IAE7D,MAAM,eAAe,qBACf,gBAA4C,CACjD,CAAC;IACF,IAAI,CAAC,gBAAgB,CAAC,SAAS,EAAE;QAC/B,MAAM,uBAAuB,GAAG,eAAkD,CAAC;QACnF,IAAI,CAAC,uBAAuB,CAAC,QAAQ,EAAE;YACrC,CAAC,CAAC,oDAAoD,CAAC,CAAC;YACxD,uBAAuB,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;SACrD;QACD,IAAI,uBAAuB,CAAC,IAAI,EAAE;YAChC,uBAAuB,CAAC,IAAI,GAAG,WAAW,CAAC,uBAAuB,CAAC,IAAI,CAAC,CAAC;SAC1E;aAAM;YACL,CAAC,CAAC,4CAA4C,CAAC,CAAC;YAChD,uBAAuB,CAAC,IAAI,GAAG,WAAW,EAAE,CAAC;SAC9C;KACF;IACD,oBAAoB,CAAC,eAAe,EAAE,SAAS,CAAC,CAAC;IAEjD,eAAe,CAAC,OAAO,GAAG,kBAAkB,CAAC,eAAe,CAAC,CAAC;IAC9D,MAAM,QAAQ,GAAG,mBAAmB,CAAC,eAAe,CAAC,CAAC;IACtD,MAAM,GAAG,GAAG,MAAM,oBAAoB,CAAC,eAAe,CAAC,CAAC;IACxD,MAAM,KAAK,GAAG,IAAI,KAAK,CAAC,eAAe,CAAC,SAAS,CAAC,CAAC;IAEnD,mEAAmE;IACnE,IAAI,CAAC,eAAe,CAAC,KAAK,EAAE;QAC1B,CAAC,CAAC,uBAAuB,eAAe,CAAC,SAAS,SAAS,QAAQ,KAAK,GAAG,GAAG,CAAC,CAAC;QAChF,MAAM,UAAU,GAAG,MAAM,KAAK,CAAC,qBAAqB,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC;QAEpE,IAAI,UAAU,KAAK,IAAI,EAAE;YACvB,CAAC,CAAC,YAAY,CAAC,CAAC;SACjB;aAAM;YACL,CAAC,CAAC,WAAW,CAAC,CAAC;YACf,IAAI;gBACF,MAAM,gBAAgB,CAAC,eAAe,EAAE,UAAU,EAAE,gBAAgB,CAAC,CAAC;gBAEtE,OAAO,UAAU,CAAC
;aACnB;YAAC,OAAO,GAAG,EAAE;gBACZ,CAAC,CAAC,0CAA0C,EAAE,GAAG,CAAC,CAAC;gBACnD,CAAC,CAAC,6BAA6B,CAAC,CAAC;aAClC;SACF;KACF;IAED,IACE,CAAC,eAAe,CAAC,SAAS;QAC1B,2BAA2B,CACzB,eAAe,CAAC,QAAQ,EACxB,eAAe,CAAC,IAAI,EACpB,eAAe,CAAC,OAAO,EACvB,eAAe,CAAC,aAAa,CAC9B,EACD;QACA,OAAO,CAAC,IAAI,CAAC,4CAA4C,CAAC,CAAC;QAC3D,OAAO,CAAC,IAAI,CAAC,gEAAgE,CAAC,CAAC;KAChF;IAED,OAAO,MAAM,mBAAmB,CAAC,eAAe,CAAC,aAAa,EAAE,KAAK,EAAC,UAAU,EAAC,EAAE;QACjF,MAAM,gBAAgB,GAAG,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE,mBAAmB,CAAC,eAAe,CAAC,CAAC,CAAC;QAExF,MAAM,UAAU,GAAG,eAAe,CAAC,UAAU,IAAI,CAAC,MAAM,sBAAsB,EAAE,CAAC,CAAC;QAClF,CAAC,CACC,eAAe,GAAG,OAAO,gBAAgB,kBAAkB,IAAI,CAAC,SAAS,CACvE,eAAe,CAAC,eAAe,CAChC,EAAE,CACJ,CAAC;QACF,MAAM,UAAU,CAAC,QAAQ,CAAC,GAAG,EAAE,gBAAgB,EAAE,eAAe,CAAC,eAAe,CAAC,CAAC;QAElF,MAAM,gBAAgB,CAAC,eAAe,EAAE,gBAAgB,EAAE,gBAAgB,CAAC,CAAC;QAE5E,OAAO,MAAM,KAAK,CAAC,cAAc,CAAC,GAAG,EAAE,gBAAgB,EAAE,QAAQ,CAAC,CAAC;IACrE,CAAC,CAAC,CAAC;AACL,CAAC;AAED;;;;;GAKG;AACH,MAAM,UAAU,QAAQ,CACtB,OAAe,EACf,OAAwC;IAExC,OAAO,gBAAgB,iCAClB,OAAO,KACV,OAAO,EACP,QAAQ,EAAE,OAAO,CAAC,QAAQ,EAC1B,IAAI,EAAE,OAAO,CAAC,IAAI,EAClB,YAAY,EAAE,UAAU,IACxB,CAAC;AACL,CAAC"}
4
desktop-operator/node_modules/@electron/get/dist/esm/proxy.d.ts
generated
vendored
Normal file
@@ -0,0 +1,4 @@
/**
 * Initializes a third-party proxy module for HTTP(S) requests.
 */
export declare function initializeProxy(): void;
24
desktop-operator/node_modules/@electron/get/dist/esm/proxy.js
generated
vendored
Normal file
@@ -0,0 +1,24 @@
import * as debug from 'debug';
|
||||
import { getEnv, setEnv } from './utils';
|
||||
const d = debug('@electron/get:proxy');
|
||||
/**
|
||||
* Initializes a third-party proxy module for HTTP(S) requests.
|
||||
*/
|
||||
export function initializeProxy() {
|
||||
try {
|
||||
// See: https://github.com/electron/get/pull/214#discussion_r798845713
|
||||
const env = getEnv('GLOBAL_AGENT_');
|
||||
setEnv('GLOBAL_AGENT_HTTP_PROXY', env('HTTP_PROXY'));
|
||||
setEnv('GLOBAL_AGENT_HTTPS_PROXY', env('HTTPS_PROXY'));
|
||||
setEnv('GLOBAL_AGENT_NO_PROXY', env('NO_PROXY'));
|
||||
/**
|
||||
* TODO: replace global-agent with a hpagent. @BlackHole1
|
||||
* https://github.com/sindresorhus/got/blob/HEAD/documentation/tips.md#proxying
|
||||
*/
|
||||
require('global-agent').bootstrap();
|
||||
}
|
||||
catch (e) {
|
||||
d('Could not load either proxy modules, built-in proxy support not available:', e);
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=proxy.js.map
1
desktop-operator/node_modules/@electron/get/dist/esm/proxy.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"proxy.js","sourceRoot":"","sources":["../../src/proxy.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAC/B,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,SAAS,CAAC;AAEzC,MAAM,CAAC,GAAG,KAAK,CAAC,qBAAqB,CAAC,CAAC;AAEvC;;GAEG;AACH,MAAM,UAAU,eAAe;IAC7B,IAAI;QACF,sEAAsE;QACtE,MAAM,GAAG,GAAG,MAAM,CAAC,eAAe,CAAC,CAAC;QAEpC,MAAM,CAAC,yBAAyB,EAAE,GAAG,CAAC,YAAY,CAAC,CAAC,CAAC;QACrD,MAAM,CAAC,0BAA0B,EAAE,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC;QACvD,MAAM,CAAC,uBAAuB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC;QAEjD;;;WAGG;QACH,OAAO,CAAC,cAAc,CAAC,CAAC,SAAS,EAAE,CAAC;KACrC;IAAC,OAAO,CAAC,EAAE;QACV,CAAC,CAAC,4EAA4E,EAAE,CAAC,CAAC,CAAC;KACpF;AACH,CAAC"}
129
desktop-operator/node_modules/@electron/get/dist/esm/types.d.ts
generated
vendored
Normal file
@@ -0,0 +1,129 @@
import { Downloader } from './Downloader';
|
||||
export declare type DownloadOptions = any;
|
||||
export interface MirrorOptions {
|
||||
/**
|
||||
* DEPRECATED - see nightlyMirror.
|
||||
*/
|
||||
nightly_mirror?: string;
|
||||
/**
|
||||
* The Electron nightly-specific mirror URL.
|
||||
*/
|
||||
nightlyMirror?: string;
|
||||
/**
|
||||
* The base URL of the mirror to download from,
|
||||
* e.g https://github.com/electron/electron/releases/download
|
||||
*/
|
||||
mirror?: string;
|
||||
/**
|
||||
* The name of the directory to download from,
|
||||
* often scoped by version number e.g 'v4.0.4'
|
||||
*/
|
||||
customDir?: string;
|
||||
/**
|
||||
* The name of the asset to download,
|
||||
* e.g 'electron-v4.0.4-linux-x64.zip'
|
||||
*/
|
||||
customFilename?: string;
|
||||
/**
|
||||
* The version of the asset to download,
|
||||
* e.g '4.0.4'
|
||||
*/
|
||||
customVersion?: string;
|
||||
/**
|
||||
* A function allowing customization of the url returned
|
||||
* from getArtifactRemoteURL().
|
||||
*/
|
||||
resolveAssetURL?: (opts: DownloadOptions) => Promise<string>;
|
||||
}
|
||||
export interface ElectronDownloadRequest {
|
||||
/**
|
||||
* The version of Electron associated with the artifact.
|
||||
*/
|
||||
version: string;
|
||||
/**
|
||||
* The type of artifact. For example:
|
||||
* * `electron`
|
||||
* * `ffmpeg`
|
||||
*/
|
||||
artifactName: string;
|
||||
}
|
||||
export interface ElectronDownloadRequestOptions {
|
||||
/**
|
||||
* Whether to download an artifact regardless of whether it's in the cache directory.
|
||||
*
|
||||
* Defaults to `false`.
|
||||
*/
|
||||
force?: boolean;
|
||||
/**
|
||||
* When set to `true`, disables checking that the artifact download completed successfully
|
||||
* with the correct payload.
|
||||
*
|
||||
* Defaults to `false`.
|
||||
*/
|
||||
unsafelyDisableChecksums?: boolean;
|
||||
/**
|
||||
* Provides checksums for the artifact as strings.
|
||||
* Can be used if you already know the checksums of the Electron artifact
|
||||
* you are downloading and want to skip the checksum file download
|
||||
* without skipping the checksum validation.
|
||||
*
|
||||
* This should be an object whose keys are the file names of the artifacts and
|
||||
* the values are their respective SHA256 checksums.
|
||||
*/
|
||||
checksums?: Record<string, string>;
|
||||
/**
|
||||
* The directory that caches Electron artifact downloads.
|
||||
*
|
||||
* The default value is dependent upon the host platform:
|
||||
*
|
||||
* * Linux: `$XDG_CACHE_HOME` or `~/.cache/electron/`
|
||||
* * MacOS: `~/Library/Caches/electron/`
|
||||
* * Windows: `%LOCALAPPDATA%/electron/Cache` or `~/AppData/Local/electron/Cache/`
|
||||
*/
|
||||
cacheRoot?: string;
|
||||
/**
|
||||
* Options passed to the downloader module.
|
||||
*/
|
||||
downloadOptions?: DownloadOptions;
|
||||
/**
|
||||
* Options related to specifying an artifact mirror.
|
||||
*/
|
||||
mirrorOptions?: MirrorOptions;
|
||||
/**
|
||||
* The custom [[Downloader]] class used to download artifacts. Defaults to the
|
||||
* built-in [[GotDownloader]].
|
||||
*/
|
||||
downloader?: Downloader<DownloadOptions>;
|
||||
/**
|
||||
* A temporary directory for downloads.
|
||||
* It is used before artifacts are put into cache.
|
||||
*/
|
||||
tempDirectory?: string;
|
||||
}
|
||||
export declare type ElectronPlatformArtifactDetails = {
|
||||
/**
|
||||
* The target artifact platform. These are Node-style platform names, for example:
|
||||
* * `win32`
|
||||
* * `darwin`
|
||||
* * `linux`
|
||||
*/
|
||||
platform: string;
|
||||
/**
|
||||
* The target artifact architecture. These are Node-style architecture names, for example:
|
||||
* * `ia32`
|
||||
* * `x64`
|
||||
* * `armv7l`
|
||||
*/
|
||||
arch: string;
|
||||
artifactSuffix?: string;
|
||||
isGeneric?: false;
|
||||
} & ElectronDownloadRequest & ElectronDownloadRequestOptions;
|
||||
export declare type ElectronGenericArtifactDetails = {
|
||||
isGeneric: true;
|
||||
} & ElectronDownloadRequest & ElectronDownloadRequestOptions;
|
||||
export declare type ElectronArtifactDetails = ElectronPlatformArtifactDetails | ElectronGenericArtifactDetails;
|
||||
export declare type Omit<T, K> = Pick<T, Exclude<keyof T, K>>;
|
||||
export declare type ElectronPlatformArtifactDetailsWithDefaults = (Omit<ElectronPlatformArtifactDetails, 'platform' | 'arch'> & {
|
||||
platform?: string;
|
||||
arch?: string;
|
||||
}) | ElectronGenericArtifactDetails;
1
desktop-operator/node_modules/@electron/get/dist/esm/types.js
generated
vendored
Normal file
@@ -0,0 +1 @@
//# sourceMappingURL=types.js.map
1
desktop-operator/node_modules/@electron/get/dist/esm/types.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../src/types.ts"],"names":[],"mappings":""}
25
desktop-operator/node_modules/@electron/get/dist/esm/utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,25 @@
export declare function withTempDirectoryIn<T>(parentDirectory: string | undefined, fn: (directory: string) => Promise<T>): Promise<T>;
|
||||
export declare function withTempDirectory<T>(fn: (directory: string) => Promise<T>): Promise<T>;
|
||||
export declare function normalizeVersion(version: string): string;
|
||||
/**
|
||||
* Runs the `uname` command and returns the trimmed output.
|
||||
*/
|
||||
export declare function uname(): string;
|
||||
/**
|
||||
* Generates an architecture name that would be used in an Electron or Node.js
|
||||
* download file name.
|
||||
*/
|
||||
export declare function getNodeArch(arch: string): string;
|
||||
/**
|
||||
* Generates an architecture name that would be used in an Electron or Node.js
|
||||
* download file name, from the `process` module information.
|
||||
*/
|
||||
export declare function getHostArch(): string;
|
||||
export declare function ensureIsTruthyString<T, K extends keyof T>(obj: T, key: K): void;
|
||||
export declare function isOfficialLinuxIA32Download(platform: string, arch: string, version: string, mirrorOptions?: object): boolean;
|
||||
/**
|
||||
* Find the value of a environment variable which may or may not have the
|
||||
* prefix, in a case-insensitive manner.
|
||||
*/
|
||||
export declare function getEnv(prefix?: string): (name: string) => string | undefined;
|
||||
export declare function setEnv(key: string, value: string | undefined): void;
95
desktop-operator/node_modules/@electron/get/dist/esm/utils.js
generated
vendored
Normal file
@@ -0,0 +1,95 @@
import * as childProcess from 'child_process';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as os from 'os';
|
||||
import * as path from 'path';
|
||||
async function useAndRemoveDirectory(directory, fn) {
|
||||
let result;
|
||||
try {
|
||||
result = await fn(directory);
|
||||
}
|
||||
finally {
|
||||
await fs.remove(directory);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
export async function withTempDirectoryIn(parentDirectory = os.tmpdir(), fn) {
|
||||
const tempDirectoryPrefix = 'electron-download-';
|
||||
const tempDirectory = await fs.mkdtemp(path.resolve(parentDirectory, tempDirectoryPrefix));
|
||||
return useAndRemoveDirectory(tempDirectory, fn);
|
||||
}
|
||||
export async function withTempDirectory(fn) {
|
||||
return withTempDirectoryIn(undefined, fn);
|
||||
}
|
||||
export function normalizeVersion(version) {
|
||||
if (!version.startsWith('v')) {
|
||||
return `v${version}`;
|
||||
}
|
||||
return version;
|
||||
}
|
||||
/**
|
||||
* Runs the `uname` command and returns the trimmed output.
|
||||
*/
|
||||
export function uname() {
|
||||
return childProcess
|
||||
.execSync('uname -m')
|
||||
.toString()
|
||||
.trim();
|
||||
}
|
||||
/**
|
||||
* Generates an architecture name that would be used in an Electron or Node.js
|
||||
* download file name.
|
||||
*/
|
||||
export function getNodeArch(arch) {
|
||||
if (arch === 'arm') {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
switch (process.config.variables.arm_version) {
|
||||
case '6':
|
||||
return uname();
|
||||
case '7':
|
||||
default:
|
||||
return 'armv7l';
|
||||
}
|
||||
}
|
||||
return arch;
|
||||
}
|
||||
/**
|
||||
* Generates an architecture name that would be used in an Electron or Node.js
|
||||
* download file name, from the `process` module information.
|
||||
*/
|
||||
export function getHostArch() {
|
||||
return getNodeArch(process.arch);
|
||||
}
|
||||
export function ensureIsTruthyString(obj, key) {
|
||||
if (!obj[key] || typeof obj[key] !== 'string') {
|
||||
throw new Error(`Expected property "${key}" to be provided as a string but it was not`);
|
||||
}
|
||||
}
|
||||
export function isOfficialLinuxIA32Download(platform, arch, version, mirrorOptions) {
|
||||
return (platform === 'linux' &&
|
||||
arch === 'ia32' &&
|
||||
Number(version.slice(1).split('.')[0]) >= 4 &&
|
||||
typeof mirrorOptions === 'undefined');
|
||||
}
|
||||
/**
|
||||
* Find the value of a environment variable which may or may not have the
|
||||
* prefix, in a case-insensitive manner.
|
||||
*/
|
||||
export function getEnv(prefix = '') {
|
||||
const envsLowerCase = {};
|
||||
for (const envKey in process.env) {
|
||||
envsLowerCase[envKey.toLowerCase()] = process.env[envKey];
|
||||
}
|
||||
return (name) => {
|
||||
return (envsLowerCase[`${prefix}${name}`.toLowerCase()] ||
|
||||
envsLowerCase[name.toLowerCase()] ||
|
||||
undefined);
|
||||
};
|
||||
}
|
||||
export function setEnv(key, value) {
|
||||
// The `void` operator always returns `undefined`.
|
||||
// See: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/void
|
||||
if (value !== void 0) {
|
||||
process.env[key] = value;
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=utils.js.map
|
||||
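For orientation (again, not part of the diff): a small sketch of how the helpers in utils.js compose. The deep import path and the `ELECTRON_` prefix are assumptions made for the example.

```typescript
// Illustrative only: exercises getHostArch, normalizeVersion, getEnv and
// withTempDirectory as implemented above.
import {
  getEnv,
  getHostArch,
  normalizeVersion,
  withTempDirectory,
} from '@electron/get/dist/esm/utils'; // assumed import path

async function demo(): Promise<void> {
  // getEnv('ELECTRON_') returns a lookup that tries the prefixed name first
  // (ELECTRON_MIRROR), then the bare name (MIRROR), ignoring case.
  const env = getEnv('ELECTRON_');
  console.log('mirror:', env('MIRROR'));

  console.log('host arch:', getHostArch());            // e.g. 'x64' or 'arm64'
  console.log('version:', normalizeVersion('22.0.0')); // -> 'v22.0.0'

  // The temp directory is created under os.tmpdir() and removed afterwards,
  // even if the callback throws.
  await withTempDirectory(async (dir) => {
    console.log('working in', dir);
  });
}

demo().catch(console.error);
```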
1
desktop-operator/node_modules/@electron/get/dist/esm/utils.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"utils.js","sourceRoot":"","sources":["../../src/utils.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,YAAY,MAAM,eAAe,CAAC;AAC9C,OAAO,KAAK,EAAE,MAAM,UAAU,CAAC;AAC/B,OAAO,KAAK,EAAE,MAAM,IAAI,CAAC;AACzB,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAE7B,KAAK,UAAU,qBAAqB,CAClC,SAAiB,EACjB,EAAqC;IAErC,IAAI,MAAS,CAAC;IACd,IAAI;QACF,MAAM,GAAG,MAAM,EAAE,CAAC,SAAS,CAAC,CAAC;KAC9B;YAAS;QACR,MAAM,EAAE,CAAC,MAAM,CAAC,SAAS,CAAC,CAAC;KAC5B;IACD,OAAO,MAAM,CAAC;AAChB,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,mBAAmB,CACvC,kBAA0B,EAAE,CAAC,MAAM,EAAE,EACrC,EAAqC;IAErC,MAAM,mBAAmB,GAAG,oBAAoB,CAAC;IACjD,MAAM,aAAa,GAAG,MAAM,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,eAAe,EAAE,mBAAmB,CAAC,CAAC,CAAC;IAC3F,OAAO,qBAAqB,CAAC,aAAa,EAAE,EAAE,CAAC,CAAC;AAClD,CAAC;AAED,MAAM,CAAC,KAAK,UAAU,iBAAiB,CAAI,EAAqC;IAC9E,OAAO,mBAAmB,CAAC,SAAS,EAAE,EAAE,CAAC,CAAC;AAC5C,CAAC;AAED,MAAM,UAAU,gBAAgB,CAAC,OAAe;IAC9C,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,EAAE;QAC5B,OAAO,IAAI,OAAO,EAAE,CAAC;KACtB;IACD,OAAO,OAAO,CAAC;AACjB,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,KAAK;IACnB,OAAO,YAAY;SAChB,QAAQ,CAAC,UAAU,CAAC;SACpB,QAAQ,EAAE;SACV,IAAI,EAAE,CAAC;AACZ,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,WAAW,CAAC,IAAY;IACtC,IAAI,IAAI,KAAK,KAAK,EAAE;QAClB,8DAA8D;QAC9D,QAAS,OAAO,CAAC,MAAM,CAAC,SAAiB,CAAC,WAAW,EAAE;YACrD,KAAK,GAAG;gBACN,OAAO,KAAK,EAAE,CAAC;YACjB,KAAK,GAAG,CAAC;YACT;gBACE,OAAO,QAAQ,CAAC;SACnB;KACF;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,WAAW;IACzB,OAAO,WAAW,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAED,MAAM,UAAU,oBAAoB,CAAuB,GAAM,EAAE,GAAM;IACvE,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,OAAO,GAAG,CAAC,GAAG,CAAC,KAAK,QAAQ,EAAE;QAC7C,MAAM,IAAI,KAAK,CAAC,sBAAsB,GAAG,6CAA6C,CAAC,CAAC;KACzF;AACH,CAAC;AAED,MAAM,UAAU,2BAA2B,CACzC,QAAgB,EAChB,IAAY,EACZ,OAAe,EACf,aAAsB;IAEtB,OAAO,CACL,QAAQ,KAAK,OAAO;QACpB,IAAI,KAAK,MAAM;QACf,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;QAC3C,OAAO,aAAa,KAAK,WAAW,CACrC,CAAC;AACJ,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,MAAM,CAAC,MAAM,GAAG,EAAE;IAChC,MAAM,aAAa,GAAsB,EAAE,CAAC;IAE5C,KAAK,MAAM,MAAM,IAAI,OAAO,CAAC,GAAG,EAAE;QAChC,aAAa,CAAC,MAAM,CAAC,WAAW,EAAE,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;KAC3D;IAED,OAAO,CAAC,IAAY,EAAsB,EAAE;QAC1C,OAAO,CACL,aAAa,CAAC,GAAG,MAAM,GAAG,IAAI,EAAE,CAAC,WAAW,EAAE,CAAC;YAC/C,aAAa,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC;YACjC,SAAS,CACV,CAAC;IACJ,CAAC,CAAC;AACJ,CAAC;AAED,MAAM,UAAU,MAAM,CAAC,GAAW,EAAE,KAAyB;IAC3D,kDAAkD;IAClD,wFAAwF;IACxF,IAAI,KAAK,KAAK,KAAK,CAAC,EAAE;QACpB,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC;KAC1B;AACH,CAAC"}
100
desktop-operator/node_modules/@electron/get/package.json
generated
vendored
Normal file
@@ -0,0 +1,100 @@
{
  "name": "@electron/get",
  "version": "2.0.3",
  "description": "Utility for downloading artifacts from different versions of Electron",
  "main": "dist/cjs/index.js",
  "module": "dist/esm/index.js",
  "repository": "https://github.com/electron/get",
  "author": "Samuel Attard",
  "license": "MIT",
  "scripts": {
    "build": "tsc && tsc -p tsconfig.esm.json",
    "build:docs": "typedoc --out docs",
    "eslint": "eslint --ext .ts src test",
    "jest": "jest --coverage",
    "lint": "npm run prettier && npm run eslint",
    "prettier": "prettier --check \"src/**/*.ts\" \"test/**/*.ts\"",
    "prepublishOnly": "npm run build",
    "test": "npm run lint && npm run jest",
    "test:nonetwork": "npm run lint && npm run jest -- --testPathIgnorePatterns network.spec"
  },
  "files": [
    "dist/*",
    "README.md"
  ],
  "engines": {
    "node": ">=12"
  },
  "dependencies": {
    "debug": "^4.1.1",
    "env-paths": "^2.2.0",
    "fs-extra": "^8.1.0",
    "got": "^11.8.5",
    "progress": "^2.0.3",
    "semver": "^6.2.0",
    "sumchecker": "^3.0.1"
  },
  "devDependencies": {
    "@continuous-auth/semantic-release-npm": "^3.0.0",
    "@types/debug": "^4.1.4",
    "@types/fs-extra": "^8.0.0",
    "@types/jest": "^24.0.13",
    "@types/node": "^12.20.55",
    "@types/progress": "^2.0.3",
    "@types/semver": "^6.2.0",
    "@typescript-eslint/eslint-plugin": "^2.34.0",
    "@typescript-eslint/parser": "^2.34.0",
    "eslint": "^6.8.0",
    "eslint-config-prettier": "^6.15.0",
    "eslint-plugin-import": "^2.22.1",
    "eslint-plugin-jest": "< 24.0.0",
    "husky": "^2.3.0",
    "jest": "^24.8.0",
    "lint-staged": "^8.1.7",
    "prettier": "^1.17.1",
    "ts-jest": "^24.0.0",
    "typedoc": "^0.17.2",
    "typescript": "^3.8.0"
  },
  "eslintConfig": {
    "parser": "@typescript-eslint/parser",
    "extends": [
      "eslint:recommended",
      "plugin:@typescript-eslint/eslint-recommended",
      "plugin:@typescript-eslint/recommended",
      "plugin:jest/recommended",
      "plugin:import/errors",
      "plugin:import/warnings",
      "plugin:import/typescript",
      "prettier",
      "prettier/@typescript-eslint"
    ]
  },
  "husky": {
    "hooks": {
      "pre-commit": "lint-staged"
    }
  },
  "lint-staged": {
    "*.ts": [
      "eslint --fix",
      "prettier --write",
      "git add"
    ]
  },
  "keywords": [
    "electron",
    "download",
    "prebuild",
    "get",
    "artifact",
    "release"
  ],
  "optionalDependencies": {
    "global-agent": "^3.0.0"
  },
  "resolutions": {
    "eslint/inquirer": "< 7.3.0",
    "**/@typescript-eslint/typescript-estree/semver": "^6.3.0"
  }
}
7
desktop-operator/node_modules/@electron/notarize/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,7 @@
Copyright 2018 Samuel Attard and contributors

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
190
desktop-operator/node_modules/@electron/notarize/README.md
generated
vendored
Normal file
@@ -0,0 +1,190 @@
Electron Notarize
-----------

> Notarize your Electron apps seamlessly for macOS

[](https://circleci.com/gh/electron/notarize)
[](https://npm.im/@electron/notarize)

## Installation

```bash
npm install @electron/notarize --save-dev
```

## What is app "notarization"?

From Apple's docs in Xcode:

> A notarized app is a macOS app that was uploaded to Apple for processing before it was distributed.
> When you export a notarized app from Xcode, it code signs the app with a Developer ID certificate
> and staples a ticket from Apple to the app. The ticket confirms that you previously uploaded the app to Apple.

> On macOS 10.14 and later, the user can launch notarized apps when Gatekeeper is enabled.
> When the user first launches a notarized app, Gatekeeper looks for the app’s ticket online.
> If the user is offline, Gatekeeper looks for the ticket that was stapled to the app.

As of macOS 10.15 (Catalina), Apple has made notarization a hard requirement for all applications
distributed outside of the Mac App Store. App Store applications do not need to be notarized.

## Prerequisites

For notarization, you need the following things:

1. Xcode 13 or later installed on your Mac.
1. An [Apple Developer](https://developer.apple.com/) account.
1. [An app-specific password for your ADC account’s Apple ID](https://support.apple.com/HT204397).
1. Your app may need to be signed with the `hardenedRuntime: true` option, with the `com.apple.security.cs.allow-jit` entitlement.

> [!NOTE]
> If you are using Electron 11 or below, you must add the `com.apple.security.cs.allow-unsigned-executable-memory` entitlement too.
> When using version 12+, this entitlement should not be applied as it increases your app's attack surface.

### Notarization on older macOS versions

Xcode 13 is available from macOS 11.3, but notarization can be performed on systems down to macOS 10.15
(see [TN3147](https://developer.apple.com/documentation/technotes/tn3147-migrating-to-the-latest-notarization-tool#Enable-notarization-on-an-older-version-of-macOS) for more information).

To achieve this, you can copy the notarytool binary from a newer macOS version and provide its path as the `notarytoolPath` option.

## API

`@electron/notarize` exposes a single `notarize` function that accepts the following parameters:
* `appPath` — the absolute path to your codesigned and packaged Electron application.
* `notarytoolPath` - String (optional) - Path of the notarytool binary ([more details](#notarization-on-older-macos-versions))
* additional options required for authenticating your Apple ID (see below)

The method returns a void Promise once app notarization is complete. Please note that notarization may take
many minutes.

If the notarization process is unusually long for your application, see Apple Developer's docs to
[Avoid long notarization response times and size limits](https://developer.apple.com/documentation/security/notarizing_macos_software_before_distribution/customizing_the_notarization_workflow#3561440).

### Usage with app-specific password

You can generate an [app-specific password](https://support.apple.com/en-us/102654) for your Apple ID
to notarize your Electron applications.

This method also requires you to specify the [Team ID](https://developer.apple.com/help/account/manage-your-team/locate-your-team-id/)
of the Developer Team you want to notarize under. An Apple ID may be part of multiple Teams.

```javascript
import { notarize } from '@electron/notarize';

await notarize({
  appPath,
  appleId, // Login name of your Apple Developer account
  appleIdPassword, // App-specific password
  teamId, // Team ID for your developer team
});
```

> [!IMPORTANT]
> **Never hard code your app-specific password into your packaging scripts.** Use an environment
> variable at a minimum.

### Usage with App Store Connect API key

Alternatively, you can also authenticate via JSON Web Token (JWT) with App Store Connect.

You can obtain an API key from [App Store Connect](https://appstoreconnect.apple.com/access/integrations/api).
Create a **Team Key** (not an _Individual Key_) with **App Manager** access.

Note down the Issuer ID (UUID format) and Key ID (10-character alphanumeric string),
and download the `.p8` API key file (`AuthKey_<appleApiKeyId>.p8`).
For security purposes, the private key can only be downloaded once.

Provide the absolute path to your API key as the `appleApiKey` argument.

```javascript
import { notarize } from '@electron/notarize';

await notarize({
  appPath,
  appleApiKey, // Absolute path to API key (e.g. `/path/to/AuthKey_X0X0X0X0X0.p8`)
  appleApiIssuer, // Issuer ID (e.g. `d5631714-a680-4b4b-8156-b4ed624c0845`)
});
```

### Usage with Keychain credentials

As an alternative to passing authentication options, you can also store your authentication
credentials (for both API key and app-specific password strategies) in the macOS Keychain
via the `xcrun notarytool` command-line utility.

This method has the advantage of validating your notarization credentials before submitting
your application for notarization.

For example:

```sh
# App-specific password strategy
xcrun notarytool store-credentials "my-app-password-profile" \
  --apple-id "<AppleID>" \
  --team-id <DeveloperTeamID> \
  --password <app_specific_password>
```

```sh
# App Store Connect API key strategy
xcrun notarytool store-credentials "my-api-key-profile" \
  --key "<PathToAPIKey>" \
  --key-id <KeyID> \
  --issuer <IssuerID>
```

Successful storage of your credentials will look like this:

```
This process stores your credentials securely in the Keychain. You reference these credentials later using a profile name.

Validating your credentials...
Success. Credentials validated.
Credentials saved to Keychain.
To use them, specify `--keychain-profile "my-api-key-profile"`
```

After successfully storing your credentials, pass the keychain profile name into
the `keychainProfile` parameter.

```javascript
import { notarize } from '@electron/notarize';

await notarize({
  appPath,
  keychainProfile,
});
```
## Troubleshooting

### Debug logging

[`debug`](https://www.npmjs.com/package/debug) is used to display logs and messages.
Run your notarization scripts with the `DEBUG=electron-notarize*` environment variable to log additional
debug information from this module.

### Validating credentials

When notarizing your application, you may run into issues with validating your notarization
credentials.

```
Error: HTTP status code: 401. Invalid credentials. Username or password is incorrect.
Use the app-specific password generated at appleid.apple.com. Ensure that all authentication arguments are correct.
```

[Storing your credentials in Keychain](#usage-with-keychain-credentials) will validate your credentials before
even submitting your application for notarization.

### Validating app notarization

To validate that notarization worked, you can use the `stapler` command-line utility:

```sh
stapler validate path/to/notarized.app
```

### Apple documentation

Apple also provides additional debugging documentation on
[Resolving common notarization issues](https://developer.apple.com/documentation/security/notarizing_macos_software_before_distribution/resolving_common_notarization_issues).
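One gap the vendored README leaves implicit is what its IMPORTANT note looks like in practice. A minimal sketch, assuming credentials are supplied through environment variables; the variable names and app path below are made up for the example, not mandated by `@electron/notarize`:

```typescript
import { notarize } from '@electron/notarize';

async function notarizeApp(): Promise<void> {
  // Hypothetical variable names; any names work as long as nothing is hard coded.
  const { APPLE_ID, APPLE_APP_SPECIFIC_PASSWORD, APPLE_TEAM_ID } = process.env;
  if (!APPLE_ID || !APPLE_APP_SPECIFIC_PASSWORD || !APPLE_TEAM_ID) {
    throw new Error('Set APPLE_ID, APPLE_APP_SPECIFIC_PASSWORD and APPLE_TEAM_ID');
  }

  await notarize({
    appPath: 'out/MyApp-darwin-arm64/MyApp.app', // hypothetical packaged app path
    appleId: APPLE_ID,
    appleIdPassword: APPLE_APP_SPECIFIC_PASSWORD,
    teamId: APPLE_TEAM_ID,
  });
}

notarizeApp().catch((err) => {
  console.error(err);
  process.exit(1);
});
```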
2
desktop-operator/node_modules/@electron/notarize/lib/check-signature.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
import { NotaryToolNotarizeAppOptions } from './types';
export declare function checkSignatures(opts: NotaryToolNotarizeAppOptions): Promise<void>;
80
desktop-operator/node_modules/@electron/notarize/lib/check-signature.js
generated
vendored
Normal file
@@ -0,0 +1,80 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.checkSignatures = void 0;
const path = __importStar(require("path"));
const spawn_1 = require("./spawn");
const debug_1 = __importDefault(require("debug"));
const d = (0, debug_1.default)('electron-notarize');
const codesignDisplay = (opts) => __awaiter(void 0, void 0, void 0, function* () {
    const result = yield (0, spawn_1.spawn)('codesign', ['-dv', '-vvvv', '--deep', path.basename(opts.appPath)], {
        cwd: path.dirname(opts.appPath),
    });
    return result;
});
const codesign = (opts) => __awaiter(void 0, void 0, void 0, function* () {
    d('attempting to check codesign of app:', opts.appPath);
    const result = yield (0, spawn_1.spawn)('codesign', ['-vvv', '--deep', '--strict', path.basename(opts.appPath)], {
        cwd: path.dirname(opts.appPath),
    });
    return result;
});
function checkSignatures(opts) {
    return __awaiter(this, void 0, void 0, function* () {
        const fileExt = path.extname(opts.appPath);
        if (fileExt === '.dmg' || fileExt === '.pkg') {
            d('skipping codesign check for dmg or pkg file');
            return;
        }
        const [codesignResult, codesignInfo] = yield Promise.all([codesign(opts), codesignDisplay(opts)]);
        let error = '';
        if (codesignInfo.code !== 0) {
            d('codesignInfo failed');
            error = `Failed to display codesign info on your application with code: ${codesignInfo.code}\n\n${codesignInfo.output}\n`;
        }
        if (codesignResult.code !== 0) {
            d('codesign check failed');
            error += `Failed to codesign your application with code: ${codesignResult.code}\n\n${codesignResult.output}\n\n${codesignInfo.output}`;
        }
        if (error) {
            throw new Error(error);
        }
        d('codesign assess succeeded');
    });
}
exports.checkSignatures = checkSignatures;
//# sourceMappingURL=check-signature.js.map
1
desktop-operator/node_modules/@electron/notarize/lib/check-signature.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"check-signature.js","sourceRoot":"","sources":["../src/check-signature.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,2CAA6B;AAE7B,mCAAgC;AAEhC,kDAA0B;AAC1B,MAAM,CAAC,GAAG,IAAA,eAAK,EAAC,mBAAmB,CAAC,CAAC;AAErC,MAAM,eAAe,GAAG,CAAO,IAAkC,EAAE,EAAE;IACnE,MAAM,MAAM,GAAG,MAAM,IAAA,aAAK,EAAC,UAAU,EAAE,CAAC,KAAK,EAAE,OAAO,EAAE,QAAQ,EAAE,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE;QAC9F,GAAG,EAAE,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC;KAChC,CAAC,CAAC;IACH,OAAO,MAAM,CAAC;AAChB,CAAC,CAAA,CAAC;AAEF,MAAM,QAAQ,GAAG,CAAO,IAAkC,EAAE,EAAE;IAC5D,CAAC,CAAC,sCAAsC,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;IACxD,MAAM,MAAM,GAAG,MAAM,IAAA,aAAK,EACxB,UAAU,EACV,CAAC,MAAM,EAAE,QAAQ,EAAE,UAAU,EAAE,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,EAC3D;QACE,GAAG,EAAE,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC;KAChC,CACF,CAAC;IAEF,OAAO,MAAM,CAAC;AAChB,CAAC,CAAA,CAAC;AACF,SAAsB,eAAe,CAAC,IAAkC;;QACtE,MAAM,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;QAC3C,IAAI,OAAO,KAAK,MAAM,IAAI,OAAO,KAAK,MAAM,EAAE;YAC5C,CAAC,CAAC,6CAA6C,CAAC,CAAC;YACjD,OAAO;SACR;QACD,MAAM,CAAC,cAAc,EAAE,YAAY,CAAC,GAAG,MAAM,OAAO,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,eAAe,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAClG,IAAI,KAAK,GAAG,EAAE,CAAC;QAEf,IAAI,YAAY,CAAC,IAAI,KAAK,CAAC,EAAE;YAC3B,CAAC,CAAC,qBAAqB,CAAC,CAAC;YACzB,KAAK,GAAG,kEAAkE,YAAY,CAAC,IAAI,OAAO,YAAY,CAAC,MAAM,IAAI,CAAC;SAC3H;QACD,IAAI,cAAc,CAAC,IAAI,KAAK,CAAC,EAAE;YAC7B,CAAC,CAAC,uBAAuB,CAAC,CAAC;YAC3B,KAAK,IAAI,kDAAkD,cAAc,CAAC,IAAI,OAAO,cAAc,CAAC,MAAM,OAAO,YAAY,CAAC,MAAM,EAAE,CAAC;SACxI;QAED,IAAI,KAAK,EAAE;YACT,MAAM,IAAI,KAAK,CAAC,KAAK,CAAC,CAAC;SACxB;QACD,CAAC,CAAC,2BAA2B,CAAC,CAAC;IACjC,CAAC;CAAA;AAtBD,0CAsBC"}
13
desktop-operator/node_modules/@electron/notarize/lib/helpers.d.ts
generated
vendored
Normal file
@@ -0,0 +1,13 @@
export declare function withTempDir<T>(fn: (dir: string) => Promise<T>): Promise<T>;
export declare function makeSecret(s: string): string;
export declare function isSecret(s: string): boolean;
export interface NotarizationInfo {
    uuid: string;
    date: Date;
    status: 'invalid' | 'in progress' | 'success';
    logFileUrl: string | null;
    statusCode?: 0 | 2;
    statusMessage?: string;
}
export declare function parseNotarizationInfo(info: string): NotarizationInfo;
export declare function delay(ms: number): Promise<void>;
106
desktop-operator/node_modules/@electron/notarize/lib/helpers.js
generated
vendored
Normal file
@@ -0,0 +1,106 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.delay = exports.parseNotarizationInfo = exports.isSecret = exports.makeSecret = exports.withTempDir = void 0;
const debug_1 = __importDefault(require("debug"));
const fs = __importStar(require("fs-extra"));
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const d = (0, debug_1.default)('electron-notarize:helpers');
function withTempDir(fn) {
    return __awaiter(this, void 0, void 0, function* () {
        const dir = yield fs.mkdtemp(path.resolve(os.tmpdir(), 'electron-notarize-'));
        d('doing work inside temp dir:', dir);
        let result;
        try {
            result = yield fn(dir);
        }
        catch (err) {
            d('work failed');
            yield fs.remove(dir);
            throw err;
        }
        d('work succeeded');
        yield fs.remove(dir);
        return result;
    });
}
exports.withTempDir = withTempDir;
class Secret {
    constructor(value) {
        this.value = value;
    }
    toString() {
        return this.value;
    }
    inspect() {
        return '******';
    }
}
function makeSecret(s) {
    return new Secret(s);
}
exports.makeSecret = makeSecret;
function isSecret(s) {
    return s instanceof Secret;
}
exports.isSecret = isSecret;
function parseNotarizationInfo(info) {
    const out = {};
    const matchToProperty = (key, r, modifier) => {
        const exec = r.exec(info);
        if (exec) {
            out[key] = modifier ? modifier(exec[1]) : exec[1];
        }
    };
    matchToProperty('uuid', /\n *RequestUUID: (.+?)\n/);
    matchToProperty('date', /\n *Date: (.+?)\n/, d => new Date(d));
    matchToProperty('status', /\n *Status: (.+?)\n/);
    matchToProperty('logFileUrl', /\n *LogFileURL: (.+?)\n/);
    matchToProperty('statusCode', /\n *Status Code: (.+?)\n/, n => parseInt(n, 10));
    matchToProperty('statusMessage', /\n *Status Message: (.+?)\n/);
    if (out.logFileUrl === '(null)') {
        out.logFileUrl = null;
    }
    return out;
}
exports.parseNotarizationInfo = parseNotarizationInfo;
function delay(ms) {
    return new Promise(resolve => setTimeout(resolve, ms));
}
exports.delay = delay;
//# sourceMappingURL=helpers.js.map
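To make `parseNotarizationInfo` above easier to follow, here is an illustrative input/output pair. The sample text mimics the legacy `altool --notarization-info` style output the regexes expect; it is invented for the example, and the deep import path is an assumption.

```typescript
import { parseNotarizationInfo } from '@electron/notarize/lib/helpers'; // assumed import path

// Made-up notarization-info text in the key/value layout the regexes match on.
const raw = [
  '',
  '   RequestUUID: 11111111-2222-3333-4444-555555555555',
  '          Date: 2023-01-01 10:00:00 +0000',
  '        Status: success',
  '    LogFileURL: (null)',
  '   Status Code: 0',
  'Status Message: Package Approved',
  '',
].join('\n');

const info = parseNotarizationInfo(raw);
// info.uuid       === '11111111-2222-3333-4444-555555555555'
// info.status     === 'success'
// info.logFileUrl === null   ('(null)' is normalized to null)
// info.statusCode === 0
console.log(info);
```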
1
desktop-operator/node_modules/@electron/notarize/lib/helpers.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"helpers.js","sourceRoot":"","sources":["../src/helpers.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,kDAA0B;AAC1B,6CAA+B;AAC/B,uCAAyB;AACzB,2CAA6B;AAE7B,MAAM,CAAC,GAAG,IAAA,eAAK,EAAC,2BAA2B,CAAC,CAAC;AAE7C,SAAsB,WAAW,CAAI,EAA+B;;QAClE,MAAM,GAAG,GAAG,MAAM,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,MAAM,EAAE,EAAE,oBAAoB,CAAC,CAAC,CAAC;QAC9E,CAAC,CAAC,6BAA6B,EAAE,GAAG,CAAC,CAAC;QACtC,IAAI,MAAS,CAAC;QACd,IAAI;YACF,MAAM,GAAG,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC;SACxB;QAAC,OAAO,GAAG,EAAE;YACZ,CAAC,CAAC,aAAa,CAAC,CAAC;YACjB,MAAM,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;YACrB,MAAM,GAAG,CAAC;SACX;QACD,CAAC,CAAC,gBAAgB,CAAC,CAAC;QACpB,MAAM,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;QACrB,OAAO,MAAM,CAAC;IAChB,CAAC;CAAA;AAdD,kCAcC;AAED,MAAM,MAAM;IACV,YAAoB,KAAa;QAAb,UAAK,GAAL,KAAK,CAAQ;IAAG,CAAC;IAErC,QAAQ;QACN,OAAO,IAAI,CAAC,KAAK,CAAC;IACpB,CAAC;IACD,OAAO;QACL,OAAO,QAAQ,CAAC;IAClB,CAAC;CACF;AAED,SAAgB,UAAU,CAAC,CAAS;IAClC,OAAQ,IAAI,MAAM,CAAC,CAAC,CAAmB,CAAC;AAC1C,CAAC;AAFD,gCAEC;AAED,SAAgB,QAAQ,CAAC,CAAS;IAChC,OAAQ,CAAS,YAAY,MAAM,CAAC;AACtC,CAAC;AAFD,4BAEC;AAYD,SAAgB,qBAAqB,CAAC,IAAY;IAChD,MAAM,GAAG,GAAG,EAAS,CAAC;IACtB,MAAM,eAAe,GAAG,CACtB,GAAM,EACN,CAAS,EACT,QAA6C,EAC7C,EAAE;QACF,MAAM,IAAI,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAC1B,IAAI,IAAI,EAAE;YACR,GAAG,CAAC,GAAG,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;SACnD;IACH,CAAC,CAAC;IACF,eAAe,CAAC,MAAM,EAAE,0BAA0B,CAAC,CAAC;IACpD,eAAe,CAAC,MAAM,EAAE,mBAAmB,EAAE,CAAC,CAAC,EAAE,CAAC,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;IAC/D,eAAe,CAAC,QAAQ,EAAE,qBAAqB,CAAC,CAAC;IACjD,eAAe,CAAC,YAAY,EAAE,yBAAyB,CAAC,CAAC;IACzD,eAAe,CAAC,YAAY,EAAE,0BAA0B,EAAE,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAQ,CAAC,CAAC;IACvF,eAAe,CAAC,eAAe,EAAE,6BAA6B,CAAC,CAAC;IAEhE,IAAI,GAAG,CAAC,UAAU,KAAK,QAAQ,EAAE;QAC/B,GAAG,CAAC,UAAU,GAAG,IAAI,CAAC;KACvB;IAED,OAAO,GAAG,CAAC;AACb,CAAC;AAxBD,sDAwBC;AAED,SAAgB,KAAK,CAAC,EAAU;IAC9B,OAAO,IAAI,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC,UAAU,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC,CAAC;AACzD,CAAC;AAFD,sBAEC"}
20
desktop-operator/node_modules/@electron/notarize/lib/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,20 @@
import { NotarizeOptions, NotarizeOptionsLegacy, NotarizeOptionsNotaryTool } from './types';
export { NotarizeOptions };
export { validateNotaryToolAuthorizationArgs as validateAuthorizationArgs } from './validate-args';
/**
 * Sends your app to Apple for notarization with `notarytool` and staples a successful
 * notarization result to the app bundle. This includes your {@link NotaryToolNotarizeAppOptions.appPath | appPath}
 * as well as one of three valid credential authentication strategies.
 *
 * See {@link NotaryToolCredentials} for authentication options.
 *
 * @category Core
 * @param args Options for notarization
 * @returns The Promise resolves once notarization is complete. Note that this may take a few minutes.
 */
declare function notarize(args: NotarizeOptionsNotaryTool): Promise<void>;
/**
 * @deprecated
 */
declare function notarize(args: NotarizeOptionsLegacy): Promise<void>;
export { notarize };
53
desktop-operator/node_modules/@electron/notarize/lib/index.js
generated
vendored
Normal file
@@ -0,0 +1,53 @@
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __rest = (this && this.__rest) || function (s, e) {
    var t = {};
    for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
        t[p] = s[p];
    if (s != null && typeof Object.getOwnPropertySymbols === "function")
        for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
            if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
                t[p[i]] = s[p[i]];
        }
    return t;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.notarize = exports.validateAuthorizationArgs = void 0;
const debug_1 = __importDefault(require("debug"));
const promise_retry_1 = __importDefault(require("promise-retry"));
const check_signature_1 = require("./check-signature");
const notarytool_1 = require("./notarytool");
const staple_1 = require("./staple");
const d = (0, debug_1.default)('electron-notarize');
var validate_args_1 = require("./validate-args");
Object.defineProperty(exports, "validateAuthorizationArgs", { enumerable: true, get: function () { return validate_args_1.validateNotaryToolAuthorizationArgs; } });
function notarize(_a) {
    var { appPath } = _a, otherOptions = __rest(_a, ["appPath"]);
    return __awaiter(this, void 0, void 0, function* () {
        if (otherOptions.tool === 'legacy') {
            throw new Error('Notarization with the legacy altool system was decommisioned as of November 2023');
        }
        yield (0, check_signature_1.checkSignatures)({ appPath });
        d('notarizing using notarytool');
        if (!(yield (0, notarytool_1.isNotaryToolAvailable)())) {
            throw new Error('notarytool is not available, you must be on at least Xcode 13 or provide notarytoolPath');
        }
        yield (0, notarytool_1.notarizeAndWaitForNotaryTool)(Object.assign({ appPath }, otherOptions));
        yield (0, promise_retry_1.default)(() => (0, staple_1.stapleApp)({ appPath }), {
            retries: 3,
        });
    });
}
exports.notarize = notarize;
//# sourceMappingURL=index.js.map
1
desktop-operator/node_modules/@electron/notarize/lib/index.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,kDAA0B;AAC1B,kEAAkC;AAElC,uDAAoD;AACpD,6CAAmF;AACnF,qCAAqC;AAQrC,MAAM,CAAC,GAAG,IAAA,eAAK,EAAC,mBAAmB,CAAC,CAAC;AAIrC,iDAAmG;AAA1F,0HAAA,mCAAmC,OAA6B;AAmBzE,SAAe,QAAQ,CAAC,EAA6C;QAA7C,EAAE,OAAO,OAAoC,EAA/B,YAAY,cAA1B,WAA4B,CAAF;;QAChD,IAAI,YAAY,CAAC,IAAI,KAAK,QAAQ,EAAE;YAClC,MAAM,IAAI,KAAK,CACb,kFAAkF,CACnF,CAAC;SACH;QAED,MAAM,IAAA,iCAAe,EAAC,EAAE,OAAO,EAAE,CAAC,CAAC;QAEnC,CAAC,CAAC,6BAA6B,CAAC,CAAC;QACjC,IAAI,CAAC,CAAC,MAAM,IAAA,kCAAqB,GAAE,CAAC,EAAE;YACpC,MAAM,IAAI,KAAK,CACb,yFAAyF,CAC1F,CAAC;SACH;QAED,MAAM,IAAA,yCAA4B,EAAC,gBACjC,OAAO,IACJ,YAAY,CACU,CAAC,CAAC;QAE7B,MAAM,IAAA,uBAAK,EAAC,GAAG,EAAE,CAAC,IAAA,kBAAS,EAAC,EAAE,OAAO,EAAE,CAAC,EAAE;YACxC,OAAO,EAAE,CAAC;SACX,CAAC,CAAC;;CACJ;AAEQ,4BAAQ"}
Some files were not shown because too many files have changed in this diff.