main funcions fixes
This commit is contained in:
21
desktop-operator/node_modules/@electron/universal/LICENSE
generated
vendored
Normal file
21
desktop-operator/node_modules/@electron/universal/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) Contributors to the Electron project
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
37
desktop-operator/node_modules/@electron/universal/README.md
generated
vendored
Normal file
37
desktop-operator/node_modules/@electron/universal/README.md
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
# @electron/universal
|
||||
|
||||
> Create universal macOS Electron applications
|
||||
|
||||
[](https://circleci.com/gh/electron/universal)
|
||||
[](https://npm.im/@electron/universal)
|
||||
|
||||
|
||||
## Usage
|
||||
|
||||
```typescript
|
||||
import { makeUniversalApp } from '@electron/universal';
|
||||
|
||||
await makeUniversalApp({
|
||||
x64AppPath: 'path/to/App_x64.app',
|
||||
arm64AppPath: 'path/to/App_arm64.app',
|
||||
outAppPath: 'path/to/App_universal.app',
|
||||
});
|
||||
```
|
||||
|
||||
## FAQ
|
||||
|
||||
#### The app is twice as big now, why?
|
||||
|
||||
Well, a Universal app isn't anything magical. It is literally the x64 app and
|
||||
the arm64 app glued together into a single application. It's twice as big
|
||||
because it contains two apps in one.
|
||||
|
||||
#### What about native modules?
|
||||
|
||||
The way `@electron/universal` works today means you don't need to worry about
|
||||
things like building universal versions of your native modules. As long as
|
||||
your x64 and arm64 apps work in isolation the Universal app will work as well.
|
||||
|
||||
#### How do I build my app for Apple silicon in the first place?
|
||||
|
||||
Check out the [Electron Apple silicon blog post](https://www.electronjs.org/blog/apple-silicon)
|
||||
16
desktop-operator/node_modules/@electron/universal/dist/cjs/asar-utils.d.ts
generated
vendored
Normal file
16
desktop-operator/node_modules/@electron/universal/dist/cjs/asar-utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
export declare enum AsarMode {
|
||||
NO_ASAR = 0,
|
||||
HAS_ASAR = 1
|
||||
}
|
||||
export type MergeASARsOptions = {
|
||||
x64AsarPath: string;
|
||||
arm64AsarPath: string;
|
||||
outputAsarPath: string;
|
||||
singleArchFiles?: string;
|
||||
};
|
||||
export declare const detectAsarMode: (appPath: string) => Promise<AsarMode>;
|
||||
export declare const generateAsarIntegrity: (asarPath: string) => {
|
||||
algorithm: "SHA256";
|
||||
hash: string;
|
||||
};
|
||||
export declare const mergeASARs: ({ x64AsarPath, arm64AsarPath, outputAsarPath, singleArchFiles, }: MergeASARsOptions) => Promise<void>;
|
||||
175
desktop-operator/node_modules/@electron/universal/dist/cjs/asar-utils.js
generated
vendored
Normal file
175
desktop-operator/node_modules/@electron/universal/dist/cjs/asar-utils.js
generated
vendored
Normal file
@@ -0,0 +1,175 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.mergeASARs = exports.generateAsarIntegrity = exports.detectAsarMode = exports.AsarMode = void 0;
|
||||
const asar_1 = __importDefault(require("@electron/asar"));
|
||||
const child_process_1 = require("child_process");
|
||||
const crypto_1 = __importDefault(require("crypto"));
|
||||
const fs_extra_1 = __importDefault(require("fs-extra"));
|
||||
const path_1 = __importDefault(require("path"));
|
||||
const minimatch_1 = require("minimatch");
|
||||
const os_1 = __importDefault(require("os"));
|
||||
const debug_1 = require("./debug");
|
||||
const LIPO = 'lipo';
|
||||
var AsarMode;
|
||||
(function (AsarMode) {
|
||||
AsarMode[AsarMode["NO_ASAR"] = 0] = "NO_ASAR";
|
||||
AsarMode[AsarMode["HAS_ASAR"] = 1] = "HAS_ASAR";
|
||||
})(AsarMode || (exports.AsarMode = AsarMode = {}));
|
||||
// See: https://github.com/apple-opensource-mirror/llvmCore/blob/0c60489d96c87140db9a6a14c6e82b15f5e5d252/include/llvm/Object/MachOFormat.h#L108-L112
|
||||
const MACHO_MAGIC = new Set([
|
||||
// 32-bit Mach-O
|
||||
0xfeedface, 0xcefaedfe,
|
||||
// 64-bit Mach-O
|
||||
0xfeedfacf, 0xcffaedfe,
|
||||
]);
|
||||
const MACHO_UNIVERSAL_MAGIC = new Set([
|
||||
// universal
|
||||
0xcafebabe, 0xbebafeca,
|
||||
]);
|
||||
const detectAsarMode = async (appPath) => {
|
||||
(0, debug_1.d)('checking asar mode of', appPath);
|
||||
const asarPath = path_1.default.resolve(appPath, 'Contents', 'Resources', 'app.asar');
|
||||
if (!(await fs_extra_1.default.pathExists(asarPath))) {
|
||||
(0, debug_1.d)('determined no asar');
|
||||
return AsarMode.NO_ASAR;
|
||||
}
|
||||
(0, debug_1.d)('determined has asar');
|
||||
return AsarMode.HAS_ASAR;
|
||||
};
|
||||
exports.detectAsarMode = detectAsarMode;
|
||||
const generateAsarIntegrity = (asarPath) => {
|
||||
return {
|
||||
algorithm: 'SHA256',
|
||||
hash: crypto_1.default
|
||||
.createHash('SHA256')
|
||||
.update(asar_1.default.getRawHeader(asarPath).headerString)
|
||||
.digest('hex'),
|
||||
};
|
||||
};
|
||||
exports.generateAsarIntegrity = generateAsarIntegrity;
|
||||
function toRelativePath(file) {
|
||||
return file.replace(/^\//, '');
|
||||
}
|
||||
function isDirectory(a, file) {
|
||||
return Boolean('files' in asar_1.default.statFile(a, file));
|
||||
}
|
||||
function checkSingleArch(archive, file, allowList) {
|
||||
if (allowList === undefined || !(0, minimatch_1.minimatch)(file, allowList, { matchBase: true })) {
|
||||
throw new Error(`Detected unique file "${file}" in "${archive}" not covered by ` +
|
||||
`allowList rule: "${allowList}"`);
|
||||
}
|
||||
}
|
||||
const mergeASARs = async ({ x64AsarPath, arm64AsarPath, outputAsarPath, singleArchFiles, }) => {
|
||||
(0, debug_1.d)(`merging ${x64AsarPath} and ${arm64AsarPath}`);
|
||||
const x64Files = new Set(asar_1.default.listPackage(x64AsarPath).map(toRelativePath));
|
||||
const arm64Files = new Set(asar_1.default.listPackage(arm64AsarPath).map(toRelativePath));
|
||||
//
|
||||
// Build set of unpacked directories and files
|
||||
//
|
||||
const unpackedFiles = new Set();
|
||||
function buildUnpacked(a, fileList) {
|
||||
for (const file of fileList) {
|
||||
const stat = asar_1.default.statFile(a, file);
|
||||
if (!('unpacked' in stat) || !stat.unpacked) {
|
||||
continue;
|
||||
}
|
||||
if ('files' in stat) {
|
||||
continue;
|
||||
}
|
||||
unpackedFiles.add(file);
|
||||
}
|
||||
}
|
||||
buildUnpacked(x64AsarPath, x64Files);
|
||||
buildUnpacked(arm64AsarPath, arm64Files);
|
||||
//
|
||||
// Build list of files/directories unique to each asar
|
||||
//
|
||||
for (const file of x64Files) {
|
||||
if (!arm64Files.has(file)) {
|
||||
checkSingleArch(x64AsarPath, file, singleArchFiles);
|
||||
}
|
||||
}
|
||||
const arm64Unique = [];
|
||||
for (const file of arm64Files) {
|
||||
if (!x64Files.has(file)) {
|
||||
checkSingleArch(arm64AsarPath, file, singleArchFiles);
|
||||
arm64Unique.push(file);
|
||||
}
|
||||
}
|
||||
//
|
||||
// Find common bindings with different content
|
||||
//
|
||||
const commonBindings = [];
|
||||
for (const file of x64Files) {
|
||||
if (!arm64Files.has(file)) {
|
||||
continue;
|
||||
}
|
||||
// Skip directories
|
||||
if (isDirectory(x64AsarPath, file)) {
|
||||
continue;
|
||||
}
|
||||
const x64Content = asar_1.default.extractFile(x64AsarPath, file);
|
||||
const arm64Content = asar_1.default.extractFile(arm64AsarPath, file);
|
||||
if (x64Content.compare(arm64Content) === 0) {
|
||||
continue;
|
||||
}
|
||||
if (MACHO_UNIVERSAL_MAGIC.has(x64Content.readUInt32LE(0)) &&
|
||||
MACHO_UNIVERSAL_MAGIC.has(arm64Content.readUInt32LE(0))) {
|
||||
continue;
|
||||
}
|
||||
if (!MACHO_MAGIC.has(x64Content.readUInt32LE(0))) {
|
||||
throw new Error(`Can't reconcile two non-macho files ${file}`);
|
||||
}
|
||||
commonBindings.push(file);
|
||||
}
|
||||
//
|
||||
// Extract both
|
||||
//
|
||||
const x64Dir = await fs_extra_1.default.mkdtemp(path_1.default.join(os_1.default.tmpdir(), 'x64-'));
|
||||
const arm64Dir = await fs_extra_1.default.mkdtemp(path_1.default.join(os_1.default.tmpdir(), 'arm64-'));
|
||||
try {
|
||||
(0, debug_1.d)(`extracting ${x64AsarPath} to ${x64Dir}`);
|
||||
asar_1.default.extractAll(x64AsarPath, x64Dir);
|
||||
(0, debug_1.d)(`extracting ${arm64AsarPath} to ${arm64Dir}`);
|
||||
asar_1.default.extractAll(arm64AsarPath, arm64Dir);
|
||||
for (const file of arm64Unique) {
|
||||
const source = path_1.default.resolve(arm64Dir, file);
|
||||
const destination = path_1.default.resolve(x64Dir, file);
|
||||
if (isDirectory(arm64AsarPath, file)) {
|
||||
(0, debug_1.d)(`creating unique directory: ${file}`);
|
||||
await fs_extra_1.default.mkdirp(destination);
|
||||
continue;
|
||||
}
|
||||
(0, debug_1.d)(`xopying unique file: ${file}`);
|
||||
await fs_extra_1.default.mkdirp(path_1.default.dirname(destination));
|
||||
await fs_extra_1.default.copy(source, destination);
|
||||
}
|
||||
for (const binding of commonBindings) {
|
||||
const source = await fs_extra_1.default.realpath(path_1.default.resolve(arm64Dir, binding));
|
||||
const destination = await fs_extra_1.default.realpath(path_1.default.resolve(x64Dir, binding));
|
||||
(0, debug_1.d)(`merging binding: ${binding}`);
|
||||
(0, child_process_1.execFileSync)(LIPO, [source, destination, '-create', '-output', destination]);
|
||||
}
|
||||
(0, debug_1.d)(`creating archive at ${outputAsarPath}`);
|
||||
const resolvedUnpack = Array.from(unpackedFiles).map((file) => path_1.default.join(x64Dir, file));
|
||||
let unpack;
|
||||
if (resolvedUnpack.length > 1) {
|
||||
unpack = `{${resolvedUnpack.join(',')}}`;
|
||||
}
|
||||
else if (resolvedUnpack.length === 1) {
|
||||
unpack = resolvedUnpack[0];
|
||||
}
|
||||
await asar_1.default.createPackageWithOptions(x64Dir, outputAsarPath, {
|
||||
unpack,
|
||||
});
|
||||
(0, debug_1.d)('done merging');
|
||||
}
|
||||
finally {
|
||||
await Promise.all([fs_extra_1.default.remove(x64Dir), fs_extra_1.default.remove(arm64Dir)]);
|
||||
}
|
||||
};
|
||||
exports.mergeASARs = mergeASARs;
|
||||
//# sourceMappingURL=asar-utils.js.map
|
||||
1
desktop-operator/node_modules/@electron/universal/dist/cjs/asar-utils.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/dist/cjs/asar-utils.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
2
desktop-operator/node_modules/@electron/universal/dist/cjs/debug.d.ts
generated
vendored
Normal file
2
desktop-operator/node_modules/@electron/universal/dist/cjs/debug.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import debug from 'debug';
|
||||
export declare const d: debug.Debugger;
|
||||
9
desktop-operator/node_modules/@electron/universal/dist/cjs/debug.js
generated
vendored
Normal file
9
desktop-operator/node_modules/@electron/universal/dist/cjs/debug.js
generated
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.d = void 0;
|
||||
const debug_1 = __importDefault(require("debug"));
|
||||
exports.d = (0, debug_1.default)('electron-universal');
|
||||
//# sourceMappingURL=debug.js.map
|
||||
1
desktop-operator/node_modules/@electron/universal/dist/cjs/debug.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/dist/cjs/debug.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"debug.js","sourceRoot":"","sources":["../../src/debug.ts"],"names":[],"mappings":";;;;;;AAAA,kDAA0B;AAEb,QAAA,CAAC,GAAG,IAAA,eAAK,EAAC,oBAAoB,CAAC,CAAC"}
|
||||
16
desktop-operator/node_modules/@electron/universal/dist/cjs/file-utils.d.ts
generated
vendored
Normal file
16
desktop-operator/node_modules/@electron/universal/dist/cjs/file-utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
export declare enum AppFileType {
|
||||
MACHO = 0,
|
||||
PLAIN = 1,
|
||||
INFO_PLIST = 2,
|
||||
SNAPSHOT = 3,
|
||||
APP_CODE = 4
|
||||
}
|
||||
export type AppFile = {
|
||||
relativePath: string;
|
||||
type: AppFileType;
|
||||
};
|
||||
/**
|
||||
*
|
||||
* @param appPath Path to the application
|
||||
*/
|
||||
export declare const getAllAppFiles: (appPath: string) => Promise<AppFile[]>;
|
||||
95
desktop-operator/node_modules/@electron/universal/dist/cjs/file-utils.js
generated
vendored
Normal file
95
desktop-operator/node_modules/@electron/universal/dist/cjs/file-utils.js
generated
vendored
Normal file
@@ -0,0 +1,95 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getAllAppFiles = exports.AppFileType = void 0;
|
||||
const cross_spawn_promise_1 = require("@malept/cross-spawn-promise");
|
||||
const fs = __importStar(require("fs-extra"));
|
||||
const path = __importStar(require("path"));
|
||||
const MACHO_PREFIX = 'Mach-O ';
|
||||
var AppFileType;
|
||||
(function (AppFileType) {
|
||||
AppFileType[AppFileType["MACHO"] = 0] = "MACHO";
|
||||
AppFileType[AppFileType["PLAIN"] = 1] = "PLAIN";
|
||||
AppFileType[AppFileType["INFO_PLIST"] = 2] = "INFO_PLIST";
|
||||
AppFileType[AppFileType["SNAPSHOT"] = 3] = "SNAPSHOT";
|
||||
AppFileType[AppFileType["APP_CODE"] = 4] = "APP_CODE";
|
||||
})(AppFileType || (exports.AppFileType = AppFileType = {}));
|
||||
/**
|
||||
*
|
||||
* @param appPath Path to the application
|
||||
*/
|
||||
const getAllAppFiles = async (appPath) => {
|
||||
const files = [];
|
||||
const visited = new Set();
|
||||
const traverse = async (p) => {
|
||||
p = await fs.realpath(p);
|
||||
if (visited.has(p))
|
||||
return;
|
||||
visited.add(p);
|
||||
const info = await fs.stat(p);
|
||||
if (info.isSymbolicLink())
|
||||
return;
|
||||
if (info.isFile()) {
|
||||
let fileType = AppFileType.PLAIN;
|
||||
var fileOutput = '';
|
||||
try {
|
||||
fileOutput = await (0, cross_spawn_promise_1.spawn)('file', ['--brief', '--no-pad', p]);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof cross_spawn_promise_1.ExitCodeError) {
|
||||
/* silently accept error codes from "file" */
|
||||
}
|
||||
else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
if (p.includes('app.asar')) {
|
||||
fileType = AppFileType.APP_CODE;
|
||||
}
|
||||
else if (fileOutput.startsWith(MACHO_PREFIX)) {
|
||||
fileType = AppFileType.MACHO;
|
||||
}
|
||||
else if (p.endsWith('.bin')) {
|
||||
fileType = AppFileType.SNAPSHOT;
|
||||
}
|
||||
else if (path.basename(p) === 'Info.plist') {
|
||||
fileType = AppFileType.INFO_PLIST;
|
||||
}
|
||||
files.push({
|
||||
relativePath: path.relative(appPath, p),
|
||||
type: fileType,
|
||||
});
|
||||
}
|
||||
if (info.isDirectory()) {
|
||||
for (const child of await fs.readdir(p)) {
|
||||
await traverse(path.resolve(p, child));
|
||||
}
|
||||
}
|
||||
};
|
||||
await traverse(appPath);
|
||||
return files;
|
||||
};
|
||||
exports.getAllAppFiles = getAllAppFiles;
|
||||
//# sourceMappingURL=file-utils.js.map
|
||||
1
desktop-operator/node_modules/@electron/universal/dist/cjs/file-utils.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/dist/cjs/file-utils.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"file-utils.js","sourceRoot":"","sources":["../../src/file-utils.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,qEAAmE;AACnE,6CAA+B;AAC/B,2CAA6B;AAE7B,MAAM,YAAY,GAAG,SAAS,CAAC;AAE/B,IAAY,WAMX;AAND,WAAY,WAAW;IACrB,+CAAK,CAAA;IACL,+CAAK,CAAA;IACL,yDAAU,CAAA;IACV,qDAAQ,CAAA;IACR,qDAAQ,CAAA;AACV,CAAC,EANW,WAAW,2BAAX,WAAW,QAMtB;AAOD;;;GAGG;AACI,MAAM,cAAc,GAAG,KAAK,EAAE,OAAe,EAAsB,EAAE;IAC1E,MAAM,KAAK,GAAc,EAAE,CAAC;IAE5B,MAAM,OAAO,GAAG,IAAI,GAAG,EAAU,CAAC;IAClC,MAAM,QAAQ,GAAG,KAAK,EAAE,CAAS,EAAE,EAAE;QACnC,CAAC,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;QACzB,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC;YAAE,OAAO;QAC3B,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QAEf,MAAM,IAAI,GAAG,MAAM,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAC9B,IAAI,IAAI,CAAC,cAAc,EAAE;YAAE,OAAO;QAClC,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE;YACjB,IAAI,QAAQ,GAAG,WAAW,CAAC,KAAK,CAAC;YAEjC,IAAI,UAAU,GAAG,EAAE,CAAC;YACpB,IAAI;gBACF,UAAU,GAAG,MAAM,IAAA,2BAAK,EAAC,MAAM,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,CAAC;aAC9D;YAAC,OAAO,CAAC,EAAE;gBACV,IAAI,CAAC,YAAY,mCAAa,EAAE;oBAC9B,6CAA6C;iBAC9C;qBAAM;oBACL,MAAM,CAAC,CAAC;iBACT;aACF;YACD,IAAI,CAAC,CAAC,QAAQ,CAAC,UAAU,CAAC,EAAE;gBAC1B,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAC;aACjC;iBAAM,IAAI,UAAU,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE;gBAC9C,QAAQ,GAAG,WAAW,CAAC,KAAK,CAAC;aAC9B;iBAAM,IAAI,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE;gBAC7B,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAC;aACjC;iBAAM,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,YAAY,EAAE;gBAC5C,QAAQ,GAAG,WAAW,CAAC,UAAU,CAAC;aACnC;YAED,KAAK,CAAC,IAAI,CAAC;gBACT,YAAY,EAAE,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC;gBACvC,IAAI,EAAE,QAAQ;aACf,CAAC,CAAC;SACJ;QAED,IAAI,IAAI,CAAC,WAAW,EAAE,EAAE;YACtB,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE;gBACvC,MAAM,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC;aACxC;SACF;IACH,CAAC,CAAC;IACF,MAAM,QAAQ,CAAC,OAAO,CAAC,CAAC;IAExB,OAAO,KAAK,CAAC;AACf,CAAC,CAAC;AAjDW,QAAA,cAAc,kBAiDzB"}
|
||||
37
desktop-operator/node_modules/@electron/universal/dist/cjs/index.d.ts
generated
vendored
Normal file
37
desktop-operator/node_modules/@electron/universal/dist/cjs/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
export type MakeUniversalOpts = {
|
||||
/**
|
||||
* Absolute file system path to the x64 version of your application. E.g. /Foo/bar/MyApp_x64.app
|
||||
*/
|
||||
x64AppPath: string;
|
||||
/**
|
||||
* Absolute file system path to the arm64 version of your application. E.g. /Foo/bar/MyApp_arm64.app
|
||||
*/
|
||||
arm64AppPath: string;
|
||||
/**
|
||||
* Absolute file system path you want the universal app to be written to. E.g. /Foo/var/MyApp_universal.app
|
||||
*
|
||||
* If this file exists it will be overwritten ONLY if "force" is set to true
|
||||
*/
|
||||
outAppPath: string;
|
||||
/**
|
||||
* Forcefully overwrite any existing files that are in the way of generating the universal application
|
||||
*/
|
||||
force?: boolean;
|
||||
/**
|
||||
* Merge x64 and arm64 ASARs into one.
|
||||
*/
|
||||
mergeASARs?: boolean;
|
||||
/**
|
||||
* Minimatch pattern of paths that are allowed to be present in one of the ASAR files, but not in the other.
|
||||
*/
|
||||
singleArchFiles?: string;
|
||||
/**
|
||||
* Minimatch pattern of binaries that are expected to be the same x64 binary in both of the ASAR files.
|
||||
*/
|
||||
x64ArchFiles?: string;
|
||||
/**
|
||||
* Minimatch pattern of paths that should not receive an injected ElectronAsarIntegrity value
|
||||
*/
|
||||
infoPlistsToIgnore?: string;
|
||||
};
|
||||
export declare const makeUniversalApp: (opts: MakeUniversalOpts) => Promise<void>;
|
||||
262
desktop-operator/node_modules/@electron/universal/dist/cjs/index.js
generated
vendored
Normal file
262
desktop-operator/node_modules/@electron/universal/dist/cjs/index.js
generated
vendored
Normal file
@@ -0,0 +1,262 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
var __rest = (this && this.__rest) || function (s, e) {
|
||||
var t = {};
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
|
||||
t[p] = s[p];
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function")
|
||||
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
|
||||
t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.makeUniversalApp = void 0;
|
||||
const cross_spawn_promise_1 = require("@malept/cross-spawn-promise");
|
||||
const asar = __importStar(require("@electron/asar"));
|
||||
const fs = __importStar(require("fs-extra"));
|
||||
const minimatch_1 = require("minimatch");
|
||||
const os = __importStar(require("os"));
|
||||
const path = __importStar(require("path"));
|
||||
const plist = __importStar(require("plist"));
|
||||
const dircompare = __importStar(require("dir-compare"));
|
||||
const file_utils_1 = require("./file-utils");
|
||||
const asar_utils_1 = require("./asar-utils");
|
||||
const sha_1 = require("./sha");
|
||||
const debug_1 = require("./debug");
|
||||
const dupedFiles = (files) => files.filter((f) => f.type !== file_utils_1.AppFileType.SNAPSHOT && f.type !== file_utils_1.AppFileType.APP_CODE);
|
||||
const makeUniversalApp = async (opts) => {
|
||||
(0, debug_1.d)('making a universal app with options', opts);
|
||||
if (process.platform !== 'darwin')
|
||||
throw new Error('@electron/universal is only supported on darwin platforms');
|
||||
if (!opts.x64AppPath || !path.isAbsolute(opts.x64AppPath))
|
||||
throw new Error('Expected opts.x64AppPath to be an absolute path but it was not');
|
||||
if (!opts.arm64AppPath || !path.isAbsolute(opts.arm64AppPath))
|
||||
throw new Error('Expected opts.arm64AppPath to be an absolute path but it was not');
|
||||
if (!opts.outAppPath || !path.isAbsolute(opts.outAppPath))
|
||||
throw new Error('Expected opts.outAppPath to be an absolute path but it was not');
|
||||
if (await fs.pathExists(opts.outAppPath)) {
|
||||
(0, debug_1.d)('output path exists already');
|
||||
if (!opts.force) {
|
||||
throw new Error(`The out path "${opts.outAppPath}" already exists and force is not set to true`);
|
||||
}
|
||||
else {
|
||||
(0, debug_1.d)('overwriting existing application because force == true');
|
||||
await fs.remove(opts.outAppPath);
|
||||
}
|
||||
}
|
||||
const x64AsarMode = await (0, asar_utils_1.detectAsarMode)(opts.x64AppPath);
|
||||
const arm64AsarMode = await (0, asar_utils_1.detectAsarMode)(opts.arm64AppPath);
|
||||
(0, debug_1.d)('detected x64AsarMode =', x64AsarMode);
|
||||
(0, debug_1.d)('detected arm64AsarMode =', arm64AsarMode);
|
||||
if (x64AsarMode !== arm64AsarMode)
|
||||
throw new Error('Both the x64 and arm64 versions of your application need to have been built with the same asar settings (enabled vs disabled)');
|
||||
const tmpDir = await fs.mkdtemp(path.resolve(os.tmpdir(), 'electron-universal-'));
|
||||
(0, debug_1.d)('building universal app in', tmpDir);
|
||||
try {
|
||||
(0, debug_1.d)('copying x64 app as starter template');
|
||||
const tmpApp = path.resolve(tmpDir, 'Tmp.app');
|
||||
await (0, cross_spawn_promise_1.spawn)('cp', ['-R', opts.x64AppPath, tmpApp]);
|
||||
const uniqueToX64 = [];
|
||||
const uniqueToArm64 = [];
|
||||
const x64Files = await (0, file_utils_1.getAllAppFiles)(await fs.realpath(tmpApp));
|
||||
const arm64Files = await (0, file_utils_1.getAllAppFiles)(await fs.realpath(opts.arm64AppPath));
|
||||
for (const file of dupedFiles(x64Files)) {
|
||||
if (!arm64Files.some((f) => f.relativePath === file.relativePath))
|
||||
uniqueToX64.push(file.relativePath);
|
||||
}
|
||||
for (const file of dupedFiles(arm64Files)) {
|
||||
if (!x64Files.some((f) => f.relativePath === file.relativePath))
|
||||
uniqueToArm64.push(file.relativePath);
|
||||
}
|
||||
if (uniqueToX64.length !== 0 || uniqueToArm64.length !== 0) {
|
||||
(0, debug_1.d)('some files were not in both builds, aborting');
|
||||
console.error({
|
||||
uniqueToX64,
|
||||
uniqueToArm64,
|
||||
});
|
||||
throw new Error('While trying to merge mach-o files across your apps we found a mismatch, the number of mach-o files is not the same between the arm64 and x64 builds');
|
||||
}
|
||||
for (const file of x64Files.filter((f) => f.type === file_utils_1.AppFileType.PLAIN)) {
|
||||
const x64Sha = await (0, sha_1.sha)(path.resolve(opts.x64AppPath, file.relativePath));
|
||||
const arm64Sha = await (0, sha_1.sha)(path.resolve(opts.arm64AppPath, file.relativePath));
|
||||
if (x64Sha !== arm64Sha) {
|
||||
(0, debug_1.d)('SHA for file', file.relativePath, `does not match across builds ${x64Sha}!=${arm64Sha}`);
|
||||
// The MainMenu.nib files generated by Xcode13 are deterministic in effect but not deterministic in generated sequence
|
||||
if (path.basename(path.dirname(file.relativePath)) === 'MainMenu.nib') {
|
||||
// The mismatch here is OK so we just move on to the next one
|
||||
continue;
|
||||
}
|
||||
throw new Error(`Expected all non-binary files to have identical SHAs when creating a universal build but "${file.relativePath}" did not`);
|
||||
}
|
||||
}
|
||||
const knownMergedMachOFiles = new Set();
|
||||
for (const machOFile of x64Files.filter((f) => f.type === file_utils_1.AppFileType.MACHO)) {
|
||||
const first = await fs.realpath(path.resolve(tmpApp, machOFile.relativePath));
|
||||
const second = await fs.realpath(path.resolve(opts.arm64AppPath, machOFile.relativePath));
|
||||
const x64Sha = await (0, sha_1.sha)(path.resolve(opts.x64AppPath, machOFile.relativePath));
|
||||
const arm64Sha = await (0, sha_1.sha)(path.resolve(opts.arm64AppPath, machOFile.relativePath));
|
||||
if (x64Sha === arm64Sha) {
|
||||
if (opts.x64ArchFiles === undefined ||
|
||||
!(0, minimatch_1.minimatch)(machOFile.relativePath, opts.x64ArchFiles, { matchBase: true })) {
|
||||
throw new Error(`Detected file "${machOFile.relativePath}" that's the same in both x64 and arm64 builds and not covered by the ` +
|
||||
`x64ArchFiles rule: "${opts.x64ArchFiles}"`);
|
||||
}
|
||||
(0, debug_1.d)('SHA for Mach-O file', machOFile.relativePath, `matches across builds ${x64Sha}===${arm64Sha}, skipping lipo`);
|
||||
continue;
|
||||
}
|
||||
(0, debug_1.d)('joining two MachO files with lipo', {
|
||||
first,
|
||||
second,
|
||||
});
|
||||
await (0, cross_spawn_promise_1.spawn)('lipo', [
|
||||
first,
|
||||
second,
|
||||
'-create',
|
||||
'-output',
|
||||
await fs.realpath(path.resolve(tmpApp, machOFile.relativePath)),
|
||||
]);
|
||||
knownMergedMachOFiles.add(machOFile.relativePath);
|
||||
}
|
||||
/**
|
||||
* If we don't have an ASAR we need to check if the two "app" folders are identical, if
|
||||
* they are then we can just leave one there and call it a day. If the app folders for x64
|
||||
* and arm64 are different though we need to rename each folder and create a new fake "app"
|
||||
* entrypoint to dynamically load the correct app folder
|
||||
*/
|
||||
if (x64AsarMode === asar_utils_1.AsarMode.NO_ASAR) {
|
||||
(0, debug_1.d)('checking if the x64 and arm64 app folders are identical');
|
||||
const comparison = await dircompare.compare(path.resolve(tmpApp, 'Contents', 'Resources', 'app'), path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app'), { compareSize: true, compareContent: true });
|
||||
const differences = comparison.diffSet.filter((difference) => difference.state !== 'equal');
|
||||
(0, debug_1.d)(`Found ${differences.length} difference(s) between the x64 and arm64 folders`);
|
||||
const nonMergedDifferences = differences.filter((difference) => !difference.name1 ||
|
||||
!knownMergedMachOFiles.has(path.join('Contents', 'Resources', 'app', difference.relativePath, difference.name1)));
|
||||
(0, debug_1.d)(`After discluding MachO files merged with lipo ${nonMergedDifferences.length} remain.`);
|
||||
if (nonMergedDifferences.length > 0) {
|
||||
(0, debug_1.d)('x64 and arm64 app folders are different, creating dynamic entry ASAR');
|
||||
await fs.move(path.resolve(tmpApp, 'Contents', 'Resources', 'app'), path.resolve(tmpApp, 'Contents', 'Resources', 'app-x64'));
|
||||
await fs.copy(path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app'), path.resolve(tmpApp, 'Contents', 'Resources', 'app-arm64'));
|
||||
const entryAsar = path.resolve(tmpDir, 'entry-asar');
|
||||
await fs.mkdir(entryAsar);
|
||||
await fs.copy(path.resolve(__dirname, '..', '..', 'entry-asar', 'no-asar.js'), path.resolve(entryAsar, 'index.js'));
|
||||
let pj = await fs.readJson(path.resolve(opts.x64AppPath, 'Contents', 'Resources', 'app', 'package.json'));
|
||||
pj.main = 'index.js';
|
||||
await fs.writeJson(path.resolve(entryAsar, 'package.json'), pj);
|
||||
await asar.createPackage(entryAsar, path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar'));
|
||||
}
|
||||
else {
|
||||
(0, debug_1.d)('x64 and arm64 app folders are the same');
|
||||
}
|
||||
}
|
||||
const generatedIntegrity = {};
|
||||
let didSplitAsar = false;
|
||||
/**
|
||||
* If we have an ASAR we just need to check if the two "app.asar" files have the same hash,
|
||||
* if they are, same as above, we can leave one there and call it a day. If they're different
|
||||
* we have to make a dynamic entrypoint. There is an assumption made here that every file in
|
||||
* app.asar.unpacked is a native node module. This assumption _may_ not be true so we should
|
||||
* look at codifying that assumption as actual logic.
|
||||
*/
|
||||
// FIXME: Codify the assumption that app.asar.unpacked only contains native modules
|
||||
if (x64AsarMode === asar_utils_1.AsarMode.HAS_ASAR && opts.mergeASARs) {
|
||||
(0, debug_1.d)('merging x64 and arm64 asars');
|
||||
const output = path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar');
|
||||
await (0, asar_utils_1.mergeASARs)({
|
||||
x64AsarPath: path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar'),
|
||||
arm64AsarPath: path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app.asar'),
|
||||
outputAsarPath: output,
|
||||
singleArchFiles: opts.singleArchFiles,
|
||||
});
|
||||
generatedIntegrity['Resources/app.asar'] = (0, asar_utils_1.generateAsarIntegrity)(output);
|
||||
}
|
||||
else if (x64AsarMode === asar_utils_1.AsarMode.HAS_ASAR) {
|
||||
(0, debug_1.d)('checking if the x64 and arm64 asars are identical');
|
||||
const x64AsarSha = await (0, sha_1.sha)(path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar'));
|
||||
const arm64AsarSha = await (0, sha_1.sha)(path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app.asar'));
|
||||
if (x64AsarSha !== arm64AsarSha) {
|
||||
didSplitAsar = true;
|
||||
(0, debug_1.d)('x64 and arm64 asars are different');
|
||||
const x64AsarPath = path.resolve(tmpApp, 'Contents', 'Resources', 'app-x64.asar');
|
||||
await fs.move(path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar'), x64AsarPath);
|
||||
const x64Unpacked = path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar.unpacked');
|
||||
if (await fs.pathExists(x64Unpacked)) {
|
||||
await fs.move(x64Unpacked, path.resolve(tmpApp, 'Contents', 'Resources', 'app-x64.asar.unpacked'));
|
||||
}
|
||||
const arm64AsarPath = path.resolve(tmpApp, 'Contents', 'Resources', 'app-arm64.asar');
|
||||
await fs.copy(path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app.asar'), arm64AsarPath);
|
||||
const arm64Unpacked = path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app.asar.unpacked');
|
||||
if (await fs.pathExists(arm64Unpacked)) {
|
||||
await fs.copy(arm64Unpacked, path.resolve(tmpApp, 'Contents', 'Resources', 'app-arm64.asar.unpacked'));
|
||||
}
|
||||
const entryAsar = path.resolve(tmpDir, 'entry-asar');
|
||||
await fs.mkdir(entryAsar);
|
||||
await fs.copy(path.resolve(__dirname, '..', '..', 'entry-asar', 'has-asar.js'), path.resolve(entryAsar, 'index.js'));
|
||||
let pj = JSON.parse((await asar.extractFile(path.resolve(opts.x64AppPath, 'Contents', 'Resources', 'app.asar'), 'package.json')).toString('utf8'));
|
||||
pj.main = 'index.js';
|
||||
await fs.writeJson(path.resolve(entryAsar, 'package.json'), pj);
|
||||
const asarPath = path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar');
|
||||
await asar.createPackage(entryAsar, asarPath);
|
||||
generatedIntegrity['Resources/app.asar'] = (0, asar_utils_1.generateAsarIntegrity)(asarPath);
|
||||
generatedIntegrity['Resources/app-x64.asar'] = (0, asar_utils_1.generateAsarIntegrity)(x64AsarPath);
|
||||
generatedIntegrity['Resources/app-arm64.asar'] = (0, asar_utils_1.generateAsarIntegrity)(arm64AsarPath);
|
||||
}
|
||||
else {
|
||||
(0, debug_1.d)('x64 and arm64 asars are the same');
|
||||
generatedIntegrity['Resources/app.asar'] = (0, asar_utils_1.generateAsarIntegrity)(path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar'));
|
||||
}
|
||||
}
|
||||
const plistFiles = x64Files.filter((f) => f.type === file_utils_1.AppFileType.INFO_PLIST);
|
||||
for (const plistFile of plistFiles) {
|
||||
const x64PlistPath = path.resolve(opts.x64AppPath, plistFile.relativePath);
|
||||
const arm64PlistPath = path.resolve(opts.arm64AppPath, plistFile.relativePath);
|
||||
const _a = plist.parse(await fs.readFile(x64PlistPath, 'utf8')), { ElectronAsarIntegrity: x64Integrity } = _a, x64Plist = __rest(_a, ["ElectronAsarIntegrity"]);
|
||||
const _b = plist.parse(await fs.readFile(arm64PlistPath, 'utf8')), { ElectronAsarIntegrity: arm64Integrity } = _b, arm64Plist = __rest(_b, ["ElectronAsarIntegrity"]);
|
||||
if (JSON.stringify(x64Plist) !== JSON.stringify(arm64Plist)) {
|
||||
throw new Error(`Expected all Info.plist files to be identical when ignoring integrity when creating a universal build but "${plistFile.relativePath}" was not`);
|
||||
}
|
||||
const injectAsarIntegrity = !opts.infoPlistsToIgnore ||
|
||||
(0, minimatch_1.minimatch)(plistFile.relativePath, opts.infoPlistsToIgnore, { matchBase: true });
|
||||
const mergedPlist = injectAsarIntegrity
|
||||
? Object.assign(Object.assign({}, x64Plist), { ElectronAsarIntegrity: generatedIntegrity }) : Object.assign({}, x64Plist);
|
||||
await fs.writeFile(path.resolve(tmpApp, plistFile.relativePath), plist.build(mergedPlist));
|
||||
}
|
||||
for (const snapshotsFile of arm64Files.filter((f) => f.type === file_utils_1.AppFileType.SNAPSHOT)) {
|
||||
(0, debug_1.d)('copying snapshot file', snapshotsFile.relativePath, 'to target application');
|
||||
await fs.copy(path.resolve(opts.arm64AppPath, snapshotsFile.relativePath), path.resolve(tmpApp, snapshotsFile.relativePath));
|
||||
}
|
||||
(0, debug_1.d)('moving final universal app to target destination');
|
||||
await fs.mkdirp(path.dirname(opts.outAppPath));
|
||||
await (0, cross_spawn_promise_1.spawn)('mv', [tmpApp, opts.outAppPath]);
|
||||
}
|
||||
catch (err) {
|
||||
throw err;
|
||||
}
|
||||
finally {
|
||||
await fs.remove(tmpDir);
|
||||
}
|
||||
};
|
||||
exports.makeUniversalApp = makeUniversalApp;
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
desktop-operator/node_modules/@electron/universal/dist/cjs/index.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/dist/cjs/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
desktop-operator/node_modules/@electron/universal/dist/cjs/sha.d.ts
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/dist/cjs/sha.d.ts
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export declare const sha: (filePath: string) => Promise<any>;
|
||||
39
desktop-operator/node_modules/@electron/universal/dist/cjs/sha.js
generated
vendored
Normal file
39
desktop-operator/node_modules/@electron/universal/dist/cjs/sha.js
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.sha = void 0;
|
||||
const fs = __importStar(require("fs-extra"));
|
||||
const crypto = __importStar(require("crypto"));
|
||||
const promises_1 = require("stream/promises");
|
||||
const debug_1 = require("./debug");
|
||||
const sha = async (filePath) => {
|
||||
(0, debug_1.d)('hashing', filePath);
|
||||
const hash = crypto.createHash('sha256');
|
||||
hash.setEncoding('hex');
|
||||
await (0, promises_1.pipeline)(fs.createReadStream(filePath), hash);
|
||||
return hash.read();
|
||||
};
|
||||
exports.sha = sha;
|
||||
//# sourceMappingURL=sha.js.map
|
||||
1
desktop-operator/node_modules/@electron/universal/dist/cjs/sha.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/dist/cjs/sha.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"sha.js","sourceRoot":"","sources":["../../src/sha.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,6CAA+B;AAC/B,+CAAiC;AACjC,8CAA2C;AAE3C,mCAA4B;AAErB,MAAM,GAAG,GAAG,KAAK,EAAE,QAAgB,EAAE,EAAE;IAC5C,IAAA,SAAC,EAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;IACvB,MAAM,IAAI,GAAG,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC;IACzC,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;IACxB,MAAM,IAAA,mBAAQ,EAAC,EAAE,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;AACrB,CAAC,CAAC;AANW,QAAA,GAAG,OAMd"}
|
||||
16
desktop-operator/node_modules/@electron/universal/dist/esm/asar-utils.d.ts
generated
vendored
Normal file
16
desktop-operator/node_modules/@electron/universal/dist/esm/asar-utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
export declare enum AsarMode {
|
||||
NO_ASAR = 0,
|
||||
HAS_ASAR = 1
|
||||
}
|
||||
export type MergeASARsOptions = {
|
||||
x64AsarPath: string;
|
||||
arm64AsarPath: string;
|
||||
outputAsarPath: string;
|
||||
singleArchFiles?: string;
|
||||
};
|
||||
export declare const detectAsarMode: (appPath: string) => Promise<AsarMode>;
|
||||
export declare const generateAsarIntegrity: (asarPath: string) => {
|
||||
algorithm: "SHA256";
|
||||
hash: string;
|
||||
};
|
||||
export declare const mergeASARs: ({ x64AsarPath, arm64AsarPath, outputAsarPath, singleArchFiles, }: MergeASARsOptions) => Promise<void>;
|
||||
166
desktop-operator/node_modules/@electron/universal/dist/esm/asar-utils.js
generated
vendored
Normal file
166
desktop-operator/node_modules/@electron/universal/dist/esm/asar-utils.js
generated
vendored
Normal file
@@ -0,0 +1,166 @@
|
||||
import asar from '@electron/asar';
|
||||
import { execFileSync } from 'child_process';
|
||||
import crypto from 'crypto';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'path';
|
||||
import { minimatch } from 'minimatch';
|
||||
import os from 'os';
|
||||
import { d } from './debug';
|
||||
const LIPO = 'lipo';
|
||||
export var AsarMode;
|
||||
(function (AsarMode) {
|
||||
AsarMode[AsarMode["NO_ASAR"] = 0] = "NO_ASAR";
|
||||
AsarMode[AsarMode["HAS_ASAR"] = 1] = "HAS_ASAR";
|
||||
})(AsarMode || (AsarMode = {}));
|
||||
// See: https://github.com/apple-opensource-mirror/llvmCore/blob/0c60489d96c87140db9a6a14c6e82b15f5e5d252/include/llvm/Object/MachOFormat.h#L108-L112
|
||||
const MACHO_MAGIC = new Set([
|
||||
// 32-bit Mach-O
|
||||
0xfeedface, 0xcefaedfe,
|
||||
// 64-bit Mach-O
|
||||
0xfeedfacf, 0xcffaedfe,
|
||||
]);
|
||||
const MACHO_UNIVERSAL_MAGIC = new Set([
|
||||
// universal
|
||||
0xcafebabe, 0xbebafeca,
|
||||
]);
|
||||
export const detectAsarMode = async (appPath) => {
|
||||
d('checking asar mode of', appPath);
|
||||
const asarPath = path.resolve(appPath, 'Contents', 'Resources', 'app.asar');
|
||||
if (!(await fs.pathExists(asarPath))) {
|
||||
d('determined no asar');
|
||||
return AsarMode.NO_ASAR;
|
||||
}
|
||||
d('determined has asar');
|
||||
return AsarMode.HAS_ASAR;
|
||||
};
|
||||
export const generateAsarIntegrity = (asarPath) => {
|
||||
return {
|
||||
algorithm: 'SHA256',
|
||||
hash: crypto
|
||||
.createHash('SHA256')
|
||||
.update(asar.getRawHeader(asarPath).headerString)
|
||||
.digest('hex'),
|
||||
};
|
||||
};
|
||||
function toRelativePath(file) {
|
||||
return file.replace(/^\//, '');
|
||||
}
|
||||
function isDirectory(a, file) {
|
||||
return Boolean('files' in asar.statFile(a, file));
|
||||
}
|
||||
function checkSingleArch(archive, file, allowList) {
|
||||
if (allowList === undefined || !minimatch(file, allowList, { matchBase: true })) {
|
||||
throw new Error(`Detected unique file "${file}" in "${archive}" not covered by ` +
|
||||
`allowList rule: "${allowList}"`);
|
||||
}
|
||||
}
|
||||
export const mergeASARs = async ({ x64AsarPath, arm64AsarPath, outputAsarPath, singleArchFiles, }) => {
|
||||
d(`merging ${x64AsarPath} and ${arm64AsarPath}`);
|
||||
const x64Files = new Set(asar.listPackage(x64AsarPath).map(toRelativePath));
|
||||
const arm64Files = new Set(asar.listPackage(arm64AsarPath).map(toRelativePath));
|
||||
//
|
||||
// Build set of unpacked directories and files
|
||||
//
|
||||
const unpackedFiles = new Set();
|
||||
function buildUnpacked(a, fileList) {
|
||||
for (const file of fileList) {
|
||||
const stat = asar.statFile(a, file);
|
||||
if (!('unpacked' in stat) || !stat.unpacked) {
|
||||
continue;
|
||||
}
|
||||
if ('files' in stat) {
|
||||
continue;
|
||||
}
|
||||
unpackedFiles.add(file);
|
||||
}
|
||||
}
|
||||
buildUnpacked(x64AsarPath, x64Files);
|
||||
buildUnpacked(arm64AsarPath, arm64Files);
|
||||
//
|
||||
// Build list of files/directories unique to each asar
|
||||
//
|
||||
for (const file of x64Files) {
|
||||
if (!arm64Files.has(file)) {
|
||||
checkSingleArch(x64AsarPath, file, singleArchFiles);
|
||||
}
|
||||
}
|
||||
const arm64Unique = [];
|
||||
for (const file of arm64Files) {
|
||||
if (!x64Files.has(file)) {
|
||||
checkSingleArch(arm64AsarPath, file, singleArchFiles);
|
||||
arm64Unique.push(file);
|
||||
}
|
||||
}
|
||||
//
|
||||
// Find common bindings with different content
|
||||
//
|
||||
const commonBindings = [];
|
||||
for (const file of x64Files) {
|
||||
if (!arm64Files.has(file)) {
|
||||
continue;
|
||||
}
|
||||
// Skip directories
|
||||
if (isDirectory(x64AsarPath, file)) {
|
||||
continue;
|
||||
}
|
||||
const x64Content = asar.extractFile(x64AsarPath, file);
|
||||
const arm64Content = asar.extractFile(arm64AsarPath, file);
|
||||
if (x64Content.compare(arm64Content) === 0) {
|
||||
continue;
|
||||
}
|
||||
if (MACHO_UNIVERSAL_MAGIC.has(x64Content.readUInt32LE(0)) &&
|
||||
MACHO_UNIVERSAL_MAGIC.has(arm64Content.readUInt32LE(0))) {
|
||||
continue;
|
||||
}
|
||||
if (!MACHO_MAGIC.has(x64Content.readUInt32LE(0))) {
|
||||
throw new Error(`Can't reconcile two non-macho files ${file}`);
|
||||
}
|
||||
commonBindings.push(file);
|
||||
}
|
||||
//
|
||||
// Extract both
|
||||
//
|
||||
const x64Dir = await fs.mkdtemp(path.join(os.tmpdir(), 'x64-'));
|
||||
const arm64Dir = await fs.mkdtemp(path.join(os.tmpdir(), 'arm64-'));
|
||||
try {
|
||||
d(`extracting ${x64AsarPath} to ${x64Dir}`);
|
||||
asar.extractAll(x64AsarPath, x64Dir);
|
||||
d(`extracting ${arm64AsarPath} to ${arm64Dir}`);
|
||||
asar.extractAll(arm64AsarPath, arm64Dir);
|
||||
for (const file of arm64Unique) {
|
||||
const source = path.resolve(arm64Dir, file);
|
||||
const destination = path.resolve(x64Dir, file);
|
||||
if (isDirectory(arm64AsarPath, file)) {
|
||||
d(`creating unique directory: ${file}`);
|
||||
await fs.mkdirp(destination);
|
||||
continue;
|
||||
}
|
||||
d(`xopying unique file: ${file}`);
|
||||
await fs.mkdirp(path.dirname(destination));
|
||||
await fs.copy(source, destination);
|
||||
}
|
||||
for (const binding of commonBindings) {
|
||||
const source = await fs.realpath(path.resolve(arm64Dir, binding));
|
||||
const destination = await fs.realpath(path.resolve(x64Dir, binding));
|
||||
d(`merging binding: ${binding}`);
|
||||
execFileSync(LIPO, [source, destination, '-create', '-output', destination]);
|
||||
}
|
||||
d(`creating archive at ${outputAsarPath}`);
|
||||
const resolvedUnpack = Array.from(unpackedFiles).map((file) => path.join(x64Dir, file));
|
||||
let unpack;
|
||||
if (resolvedUnpack.length > 1) {
|
||||
unpack = `{${resolvedUnpack.join(',')}}`;
|
||||
}
|
||||
else if (resolvedUnpack.length === 1) {
|
||||
unpack = resolvedUnpack[0];
|
||||
}
|
||||
await asar.createPackageWithOptions(x64Dir, outputAsarPath, {
|
||||
unpack,
|
||||
});
|
||||
d('done merging');
|
||||
}
|
||||
finally {
|
||||
await Promise.all([fs.remove(x64Dir), fs.remove(arm64Dir)]);
|
||||
}
|
||||
};
|
||||
//# sourceMappingURL=asar-utils.js.map
|
||||
1
desktop-operator/node_modules/@electron/universal/dist/esm/asar-utils.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/dist/esm/asar-utils.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
2
desktop-operator/node_modules/@electron/universal/dist/esm/debug.d.ts
generated
vendored
Normal file
2
desktop-operator/node_modules/@electron/universal/dist/esm/debug.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
import debug from 'debug';
|
||||
export declare const d: debug.Debugger;
|
||||
3
desktop-operator/node_modules/@electron/universal/dist/esm/debug.js
generated
vendored
Normal file
3
desktop-operator/node_modules/@electron/universal/dist/esm/debug.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
import debug from 'debug';
|
||||
export const d = debug('electron-universal');
|
||||
//# sourceMappingURL=debug.js.map
|
||||
1
desktop-operator/node_modules/@electron/universal/dist/esm/debug.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/dist/esm/debug.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"debug.js","sourceRoot":"","sources":["../../src/debug.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,MAAM,OAAO,CAAC;AAE1B,MAAM,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,oBAAoB,CAAC,CAAC"}
|
||||
16
desktop-operator/node_modules/@electron/universal/dist/esm/file-utils.d.ts
generated
vendored
Normal file
16
desktop-operator/node_modules/@electron/universal/dist/esm/file-utils.d.ts
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
export declare enum AppFileType {
|
||||
MACHO = 0,
|
||||
PLAIN = 1,
|
||||
INFO_PLIST = 2,
|
||||
SNAPSHOT = 3,
|
||||
APP_CODE = 4
|
||||
}
|
||||
export type AppFile = {
|
||||
relativePath: string;
|
||||
type: AppFileType;
|
||||
};
|
||||
/**
|
||||
*
|
||||
* @param appPath Path to the application
|
||||
*/
|
||||
export declare const getAllAppFiles: (appPath: string) => Promise<AppFile[]>;
|
||||
68
desktop-operator/node_modules/@electron/universal/dist/esm/file-utils.js
generated
vendored
Normal file
68
desktop-operator/node_modules/@electron/universal/dist/esm/file-utils.js
generated
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
import { spawn, ExitCodeError } from '@malept/cross-spawn-promise';
|
||||
import * as fs from 'fs-extra';
|
||||
import * as path from 'path';
|
||||
const MACHO_PREFIX = 'Mach-O ';
|
||||
export var AppFileType;
|
||||
(function (AppFileType) {
|
||||
AppFileType[AppFileType["MACHO"] = 0] = "MACHO";
|
||||
AppFileType[AppFileType["PLAIN"] = 1] = "PLAIN";
|
||||
AppFileType[AppFileType["INFO_PLIST"] = 2] = "INFO_PLIST";
|
||||
AppFileType[AppFileType["SNAPSHOT"] = 3] = "SNAPSHOT";
|
||||
AppFileType[AppFileType["APP_CODE"] = 4] = "APP_CODE";
|
||||
})(AppFileType || (AppFileType = {}));
|
||||
/**
|
||||
*
|
||||
* @param appPath Path to the application
|
||||
*/
|
||||
export const getAllAppFiles = async (appPath) => {
|
||||
const files = [];
|
||||
const visited = new Set();
|
||||
const traverse = async (p) => {
|
||||
p = await fs.realpath(p);
|
||||
if (visited.has(p))
|
||||
return;
|
||||
visited.add(p);
|
||||
const info = await fs.stat(p);
|
||||
if (info.isSymbolicLink())
|
||||
return;
|
||||
if (info.isFile()) {
|
||||
let fileType = AppFileType.PLAIN;
|
||||
var fileOutput = '';
|
||||
try {
|
||||
fileOutput = await spawn('file', ['--brief', '--no-pad', p]);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof ExitCodeError) {
|
||||
/* silently accept error codes from "file" */
|
||||
}
|
||||
else {
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
if (p.includes('app.asar')) {
|
||||
fileType = AppFileType.APP_CODE;
|
||||
}
|
||||
else if (fileOutput.startsWith(MACHO_PREFIX)) {
|
||||
fileType = AppFileType.MACHO;
|
||||
}
|
||||
else if (p.endsWith('.bin')) {
|
||||
fileType = AppFileType.SNAPSHOT;
|
||||
}
|
||||
else if (path.basename(p) === 'Info.plist') {
|
||||
fileType = AppFileType.INFO_PLIST;
|
||||
}
|
||||
files.push({
|
||||
relativePath: path.relative(appPath, p),
|
||||
type: fileType,
|
||||
});
|
||||
}
|
||||
if (info.isDirectory()) {
|
||||
for (const child of await fs.readdir(p)) {
|
||||
await traverse(path.resolve(p, child));
|
||||
}
|
||||
}
|
||||
};
|
||||
await traverse(appPath);
|
||||
return files;
|
||||
};
|
||||
//# sourceMappingURL=file-utils.js.map
|
||||
1
desktop-operator/node_modules/@electron/universal/dist/esm/file-utils.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/dist/esm/file-utils.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"file-utils.js","sourceRoot":"","sources":["../../src/file-utils.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,aAAa,EAAE,MAAM,6BAA6B,CAAC;AACnE,OAAO,KAAK,EAAE,MAAM,UAAU,CAAC;AAC/B,OAAO,KAAK,IAAI,MAAM,MAAM,CAAC;AAE7B,MAAM,YAAY,GAAG,SAAS,CAAC;AAE/B,MAAM,CAAN,IAAY,WAMX;AAND,WAAY,WAAW;IACrB,+CAAK,CAAA;IACL,+CAAK,CAAA;IACL,yDAAU,CAAA;IACV,qDAAQ,CAAA;IACR,qDAAQ,CAAA;AACV,CAAC,EANW,WAAW,KAAX,WAAW,QAMtB;AAOD;;;GAGG;AACH,MAAM,CAAC,MAAM,cAAc,GAAG,KAAK,EAAE,OAAe,EAAsB,EAAE;IAC1E,MAAM,KAAK,GAAc,EAAE,CAAC;IAE5B,MAAM,OAAO,GAAG,IAAI,GAAG,EAAU,CAAC;IAClC,MAAM,QAAQ,GAAG,KAAK,EAAE,CAAS,EAAE,EAAE;QACnC,CAAC,GAAG,MAAM,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;QACzB,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC;YAAE,OAAO;QAC3B,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;QAEf,MAAM,IAAI,GAAG,MAAM,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;QAC9B,IAAI,IAAI,CAAC,cAAc,EAAE;YAAE,OAAO;QAClC,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE;YACjB,IAAI,QAAQ,GAAG,WAAW,CAAC,KAAK,CAAC;YAEjC,IAAI,UAAU,GAAG,EAAE,CAAC;YACpB,IAAI;gBACF,UAAU,GAAG,MAAM,KAAK,CAAC,MAAM,EAAE,CAAC,SAAS,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,CAAC;aAC9D;YAAC,OAAO,CAAC,EAAE;gBACV,IAAI,CAAC,YAAY,aAAa,EAAE;oBAC9B,6CAA6C;iBAC9C;qBAAM;oBACL,MAAM,CAAC,CAAC;iBACT;aACF;YACD,IAAI,CAAC,CAAC,QAAQ,CAAC,UAAU,CAAC,EAAE;gBAC1B,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAC;aACjC;iBAAM,IAAI,UAAU,CAAC,UAAU,CAAC,YAAY,CAAC,EAAE;gBAC9C,QAAQ,GAAG,WAAW,CAAC,KAAK,CAAC;aAC9B;iBAAM,IAAI,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE;gBAC7B,QAAQ,GAAG,WAAW,CAAC,QAAQ,CAAC;aACjC;iBAAM,IAAI,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,KAAK,YAAY,EAAE;gBAC5C,QAAQ,GAAG,WAAW,CAAC,UAAU,CAAC;aACnC;YAED,KAAK,CAAC,IAAI,CAAC;gBACT,YAAY,EAAE,IAAI,CAAC,QAAQ,CAAC,OAAO,EAAE,CAAC,CAAC;gBACvC,IAAI,EAAE,QAAQ;aACf,CAAC,CAAC;SACJ;QAED,IAAI,IAAI,CAAC,WAAW,EAAE,EAAE;YACtB,KAAK,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE;gBACvC,MAAM,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC;aACxC;SACF;IACH,CAAC,CAAC;IACF,MAAM,QAAQ,CAAC,OAAO,CAAC,CAAC;IAExB,OAAO,KAAK,CAAC;AACf,CAAC,CAAC"}
|
||||
37
desktop-operator/node_modules/@electron/universal/dist/esm/index.d.ts
generated
vendored
Normal file
37
desktop-operator/node_modules/@electron/universal/dist/esm/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
export type MakeUniversalOpts = {
|
||||
/**
|
||||
* Absolute file system path to the x64 version of your application. E.g. /Foo/bar/MyApp_x64.app
|
||||
*/
|
||||
x64AppPath: string;
|
||||
/**
|
||||
* Absolute file system path to the arm64 version of your application. E.g. /Foo/bar/MyApp_arm64.app
|
||||
*/
|
||||
arm64AppPath: string;
|
||||
/**
|
||||
* Absolute file system path you want the universal app to be written to. E.g. /Foo/var/MyApp_universal.app
|
||||
*
|
||||
* If this file exists it will be overwritten ONLY if "force" is set to true
|
||||
*/
|
||||
outAppPath: string;
|
||||
/**
|
||||
* Forcefully overwrite any existing files that are in the way of generating the universal application
|
||||
*/
|
||||
force?: boolean;
|
||||
/**
|
||||
* Merge x64 and arm64 ASARs into one.
|
||||
*/
|
||||
mergeASARs?: boolean;
|
||||
/**
|
||||
* Minimatch pattern of paths that are allowed to be present in one of the ASAR files, but not in the other.
|
||||
*/
|
||||
singleArchFiles?: string;
|
||||
/**
|
||||
* Minimatch pattern of binaries that are expected to be the same x64 binary in both of the ASAR files.
|
||||
*/
|
||||
x64ArchFiles?: string;
|
||||
/**
|
||||
* Minimatch pattern of paths that should not receive an injected ElectronAsarIntegrity value
|
||||
*/
|
||||
infoPlistsToIgnore?: string;
|
||||
};
|
||||
export declare const makeUniversalApp: (opts: MakeUniversalOpts) => Promise<void>;
|
||||
235
desktop-operator/node_modules/@electron/universal/dist/esm/index.js
generated
vendored
Normal file
235
desktop-operator/node_modules/@electron/universal/dist/esm/index.js
generated
vendored
Normal file
@@ -0,0 +1,235 @@
|
||||
var __rest = (this && this.__rest) || function (s, e) {
|
||||
var t = {};
|
||||
for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0)
|
||||
t[p] = s[p];
|
||||
if (s != null && typeof Object.getOwnPropertySymbols === "function")
|
||||
for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) {
|
||||
if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i]))
|
||||
t[p[i]] = s[p[i]];
|
||||
}
|
||||
return t;
|
||||
};
|
||||
import { spawn } from '@malept/cross-spawn-promise';
|
||||
import * as asar from '@electron/asar';
|
||||
import * as fs from 'fs-extra';
|
||||
import { minimatch } from 'minimatch';
|
||||
import * as os from 'os';
|
||||
import * as path from 'path';
|
||||
import * as plist from 'plist';
|
||||
import * as dircompare from 'dir-compare';
|
||||
import { AppFileType, getAllAppFiles } from './file-utils';
|
||||
import { AsarMode, detectAsarMode, generateAsarIntegrity, mergeASARs } from './asar-utils';
|
||||
import { sha } from './sha';
|
||||
import { d } from './debug';
|
||||
const dupedFiles = (files) => files.filter((f) => f.type !== AppFileType.SNAPSHOT && f.type !== AppFileType.APP_CODE);
|
||||
export const makeUniversalApp = async (opts) => {
|
||||
d('making a universal app with options', opts);
|
||||
if (process.platform !== 'darwin')
|
||||
throw new Error('@electron/universal is only supported on darwin platforms');
|
||||
if (!opts.x64AppPath || !path.isAbsolute(opts.x64AppPath))
|
||||
throw new Error('Expected opts.x64AppPath to be an absolute path but it was not');
|
||||
if (!opts.arm64AppPath || !path.isAbsolute(opts.arm64AppPath))
|
||||
throw new Error('Expected opts.arm64AppPath to be an absolute path but it was not');
|
||||
if (!opts.outAppPath || !path.isAbsolute(opts.outAppPath))
|
||||
throw new Error('Expected opts.outAppPath to be an absolute path but it was not');
|
||||
if (await fs.pathExists(opts.outAppPath)) {
|
||||
d('output path exists already');
|
||||
if (!opts.force) {
|
||||
throw new Error(`The out path "${opts.outAppPath}" already exists and force is not set to true`);
|
||||
}
|
||||
else {
|
||||
d('overwriting existing application because force == true');
|
||||
await fs.remove(opts.outAppPath);
|
||||
}
|
||||
}
|
||||
const x64AsarMode = await detectAsarMode(opts.x64AppPath);
|
||||
const arm64AsarMode = await detectAsarMode(opts.arm64AppPath);
|
||||
d('detected x64AsarMode =', x64AsarMode);
|
||||
d('detected arm64AsarMode =', arm64AsarMode);
|
||||
if (x64AsarMode !== arm64AsarMode)
|
||||
throw new Error('Both the x64 and arm64 versions of your application need to have been built with the same asar settings (enabled vs disabled)');
|
||||
const tmpDir = await fs.mkdtemp(path.resolve(os.tmpdir(), 'electron-universal-'));
|
||||
d('building universal app in', tmpDir);
|
||||
try {
|
||||
d('copying x64 app as starter template');
|
||||
const tmpApp = path.resolve(tmpDir, 'Tmp.app');
|
||||
await spawn('cp', ['-R', opts.x64AppPath, tmpApp]);
|
||||
const uniqueToX64 = [];
|
||||
const uniqueToArm64 = [];
|
||||
const x64Files = await getAllAppFiles(await fs.realpath(tmpApp));
|
||||
const arm64Files = await getAllAppFiles(await fs.realpath(opts.arm64AppPath));
|
||||
for (const file of dupedFiles(x64Files)) {
|
||||
if (!arm64Files.some((f) => f.relativePath === file.relativePath))
|
||||
uniqueToX64.push(file.relativePath);
|
||||
}
|
||||
for (const file of dupedFiles(arm64Files)) {
|
||||
if (!x64Files.some((f) => f.relativePath === file.relativePath))
|
||||
uniqueToArm64.push(file.relativePath);
|
||||
}
|
||||
if (uniqueToX64.length !== 0 || uniqueToArm64.length !== 0) {
|
||||
d('some files were not in both builds, aborting');
|
||||
console.error({
|
||||
uniqueToX64,
|
||||
uniqueToArm64,
|
||||
});
|
||||
throw new Error('While trying to merge mach-o files across your apps we found a mismatch, the number of mach-o files is not the same between the arm64 and x64 builds');
|
||||
}
|
||||
for (const file of x64Files.filter((f) => f.type === AppFileType.PLAIN)) {
|
||||
const x64Sha = await sha(path.resolve(opts.x64AppPath, file.relativePath));
|
||||
const arm64Sha = await sha(path.resolve(opts.arm64AppPath, file.relativePath));
|
||||
if (x64Sha !== arm64Sha) {
|
||||
d('SHA for file', file.relativePath, `does not match across builds ${x64Sha}!=${arm64Sha}`);
|
||||
// The MainMenu.nib files generated by Xcode13 are deterministic in effect but not deterministic in generated sequence
|
||||
if (path.basename(path.dirname(file.relativePath)) === 'MainMenu.nib') {
|
||||
// The mismatch here is OK so we just move on to the next one
|
||||
continue;
|
||||
}
|
||||
throw new Error(`Expected all non-binary files to have identical SHAs when creating a universal build but "${file.relativePath}" did not`);
|
||||
}
|
||||
}
|
||||
const knownMergedMachOFiles = new Set();
|
||||
for (const machOFile of x64Files.filter((f) => f.type === AppFileType.MACHO)) {
|
||||
const first = await fs.realpath(path.resolve(tmpApp, machOFile.relativePath));
|
||||
const second = await fs.realpath(path.resolve(opts.arm64AppPath, machOFile.relativePath));
|
||||
const x64Sha = await sha(path.resolve(opts.x64AppPath, machOFile.relativePath));
|
||||
const arm64Sha = await sha(path.resolve(opts.arm64AppPath, machOFile.relativePath));
|
||||
if (x64Sha === arm64Sha) {
|
||||
if (opts.x64ArchFiles === undefined ||
|
||||
!minimatch(machOFile.relativePath, opts.x64ArchFiles, { matchBase: true })) {
|
||||
throw new Error(`Detected file "${machOFile.relativePath}" that's the same in both x64 and arm64 builds and not covered by the ` +
|
||||
`x64ArchFiles rule: "${opts.x64ArchFiles}"`);
|
||||
}
|
||||
d('SHA for Mach-O file', machOFile.relativePath, `matches across builds ${x64Sha}===${arm64Sha}, skipping lipo`);
|
||||
continue;
|
||||
}
|
||||
d('joining two MachO files with lipo', {
|
||||
first,
|
||||
second,
|
||||
});
|
||||
await spawn('lipo', [
|
||||
first,
|
||||
second,
|
||||
'-create',
|
||||
'-output',
|
||||
await fs.realpath(path.resolve(tmpApp, machOFile.relativePath)),
|
||||
]);
|
||||
knownMergedMachOFiles.add(machOFile.relativePath);
|
||||
}
|
||||
/**
|
||||
* If we don't have an ASAR we need to check if the two "app" folders are identical, if
|
||||
* they are then we can just leave one there and call it a day. If the app folders for x64
|
||||
* and arm64 are different though we need to rename each folder and create a new fake "app"
|
||||
* entrypoint to dynamically load the correct app folder
|
||||
*/
|
||||
if (x64AsarMode === AsarMode.NO_ASAR) {
|
||||
d('checking if the x64 and arm64 app folders are identical');
|
||||
const comparison = await dircompare.compare(path.resolve(tmpApp, 'Contents', 'Resources', 'app'), path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app'), { compareSize: true, compareContent: true });
|
||||
const differences = comparison.diffSet.filter((difference) => difference.state !== 'equal');
|
||||
d(`Found ${differences.length} difference(s) between the x64 and arm64 folders`);
|
||||
const nonMergedDifferences = differences.filter((difference) => !difference.name1 ||
|
||||
!knownMergedMachOFiles.has(path.join('Contents', 'Resources', 'app', difference.relativePath, difference.name1)));
|
||||
d(`After discluding MachO files merged with lipo ${nonMergedDifferences.length} remain.`);
|
||||
if (nonMergedDifferences.length > 0) {
|
||||
d('x64 and arm64 app folders are different, creating dynamic entry ASAR');
|
||||
await fs.move(path.resolve(tmpApp, 'Contents', 'Resources', 'app'), path.resolve(tmpApp, 'Contents', 'Resources', 'app-x64'));
|
||||
await fs.copy(path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app'), path.resolve(tmpApp, 'Contents', 'Resources', 'app-arm64'));
|
||||
const entryAsar = path.resolve(tmpDir, 'entry-asar');
|
||||
await fs.mkdir(entryAsar);
|
||||
await fs.copy(path.resolve(__dirname, '..', '..', 'entry-asar', 'no-asar.js'), path.resolve(entryAsar, 'index.js'));
|
||||
let pj = await fs.readJson(path.resolve(opts.x64AppPath, 'Contents', 'Resources', 'app', 'package.json'));
|
||||
pj.main = 'index.js';
|
||||
await fs.writeJson(path.resolve(entryAsar, 'package.json'), pj);
|
||||
await asar.createPackage(entryAsar, path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar'));
|
||||
}
|
||||
else {
|
||||
d('x64 and arm64 app folders are the same');
|
||||
}
|
||||
}
|
||||
const generatedIntegrity = {};
|
||||
let didSplitAsar = false;
|
||||
/**
|
||||
* If we have an ASAR we just need to check if the two "app.asar" files have the same hash,
|
||||
* if they are, same as above, we can leave one there and call it a day. If they're different
|
||||
* we have to make a dynamic entrypoint. There is an assumption made here that every file in
|
||||
* app.asar.unpacked is a native node module. This assumption _may_ not be true so we should
|
||||
* look at codifying that assumption as actual logic.
|
||||
*/
|
||||
// FIXME: Codify the assumption that app.asar.unpacked only contains native modules
|
||||
if (x64AsarMode === AsarMode.HAS_ASAR && opts.mergeASARs) {
|
||||
d('merging x64 and arm64 asars');
|
||||
const output = path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar');
|
||||
await mergeASARs({
|
||||
x64AsarPath: path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar'),
|
||||
arm64AsarPath: path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app.asar'),
|
||||
outputAsarPath: output,
|
||||
singleArchFiles: opts.singleArchFiles,
|
||||
});
|
||||
generatedIntegrity['Resources/app.asar'] = generateAsarIntegrity(output);
|
||||
}
|
||||
else if (x64AsarMode === AsarMode.HAS_ASAR) {
|
||||
d('checking if the x64 and arm64 asars are identical');
|
||||
const x64AsarSha = await sha(path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar'));
|
||||
const arm64AsarSha = await sha(path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app.asar'));
|
||||
if (x64AsarSha !== arm64AsarSha) {
|
||||
didSplitAsar = true;
|
||||
d('x64 and arm64 asars are different');
|
||||
const x64AsarPath = path.resolve(tmpApp, 'Contents', 'Resources', 'app-x64.asar');
|
||||
await fs.move(path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar'), x64AsarPath);
|
||||
const x64Unpacked = path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar.unpacked');
|
||||
if (await fs.pathExists(x64Unpacked)) {
|
||||
await fs.move(x64Unpacked, path.resolve(tmpApp, 'Contents', 'Resources', 'app-x64.asar.unpacked'));
|
||||
}
|
||||
const arm64AsarPath = path.resolve(tmpApp, 'Contents', 'Resources', 'app-arm64.asar');
|
||||
await fs.copy(path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app.asar'), arm64AsarPath);
|
||||
const arm64Unpacked = path.resolve(opts.arm64AppPath, 'Contents', 'Resources', 'app.asar.unpacked');
|
||||
if (await fs.pathExists(arm64Unpacked)) {
|
||||
await fs.copy(arm64Unpacked, path.resolve(tmpApp, 'Contents', 'Resources', 'app-arm64.asar.unpacked'));
|
||||
}
|
||||
const entryAsar = path.resolve(tmpDir, 'entry-asar');
|
||||
await fs.mkdir(entryAsar);
|
||||
await fs.copy(path.resolve(__dirname, '..', '..', 'entry-asar', 'has-asar.js'), path.resolve(entryAsar, 'index.js'));
|
||||
let pj = JSON.parse((await asar.extractFile(path.resolve(opts.x64AppPath, 'Contents', 'Resources', 'app.asar'), 'package.json')).toString('utf8'));
|
||||
pj.main = 'index.js';
|
||||
await fs.writeJson(path.resolve(entryAsar, 'package.json'), pj);
|
||||
const asarPath = path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar');
|
||||
await asar.createPackage(entryAsar, asarPath);
|
||||
generatedIntegrity['Resources/app.asar'] = generateAsarIntegrity(asarPath);
|
||||
generatedIntegrity['Resources/app-x64.asar'] = generateAsarIntegrity(x64AsarPath);
|
||||
generatedIntegrity['Resources/app-arm64.asar'] = generateAsarIntegrity(arm64AsarPath);
|
||||
}
|
||||
else {
|
||||
d('x64 and arm64 asars are the same');
|
||||
generatedIntegrity['Resources/app.asar'] = generateAsarIntegrity(path.resolve(tmpApp, 'Contents', 'Resources', 'app.asar'));
|
||||
}
|
||||
}
|
||||
const plistFiles = x64Files.filter((f) => f.type === AppFileType.INFO_PLIST);
|
||||
for (const plistFile of plistFiles) {
|
||||
const x64PlistPath = path.resolve(opts.x64AppPath, plistFile.relativePath);
|
||||
const arm64PlistPath = path.resolve(opts.arm64AppPath, plistFile.relativePath);
|
||||
const _a = plist.parse(await fs.readFile(x64PlistPath, 'utf8')), { ElectronAsarIntegrity: x64Integrity } = _a, x64Plist = __rest(_a, ["ElectronAsarIntegrity"]);
|
||||
const _b = plist.parse(await fs.readFile(arm64PlistPath, 'utf8')), { ElectronAsarIntegrity: arm64Integrity } = _b, arm64Plist = __rest(_b, ["ElectronAsarIntegrity"]);
|
||||
if (JSON.stringify(x64Plist) !== JSON.stringify(arm64Plist)) {
|
||||
throw new Error(`Expected all Info.plist files to be identical when ignoring integrity when creating a universal build but "${plistFile.relativePath}" was not`);
|
||||
}
|
||||
const injectAsarIntegrity = !opts.infoPlistsToIgnore ||
|
||||
minimatch(plistFile.relativePath, opts.infoPlistsToIgnore, { matchBase: true });
|
||||
const mergedPlist = injectAsarIntegrity
|
||||
? Object.assign(Object.assign({}, x64Plist), { ElectronAsarIntegrity: generatedIntegrity }) : Object.assign({}, x64Plist);
|
||||
await fs.writeFile(path.resolve(tmpApp, plistFile.relativePath), plist.build(mergedPlist));
|
||||
}
|
||||
for (const snapshotsFile of arm64Files.filter((f) => f.type === AppFileType.SNAPSHOT)) {
|
||||
d('copying snapshot file', snapshotsFile.relativePath, 'to target application');
|
||||
await fs.copy(path.resolve(opts.arm64AppPath, snapshotsFile.relativePath), path.resolve(tmpApp, snapshotsFile.relativePath));
|
||||
}
|
||||
d('moving final universal app to target destination');
|
||||
await fs.mkdirp(path.dirname(opts.outAppPath));
|
||||
await spawn('mv', [tmpApp, opts.outAppPath]);
|
||||
}
|
||||
catch (err) {
|
||||
throw err;
|
||||
}
|
||||
finally {
|
||||
await fs.remove(tmpDir);
|
||||
}
|
||||
};
|
||||
//# sourceMappingURL=index.js.map
|
||||
1
desktop-operator/node_modules/@electron/universal/dist/esm/index.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/dist/esm/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
desktop-operator/node_modules/@electron/universal/dist/esm/sha.d.ts
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/dist/esm/sha.d.ts
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
/**
 * Computes the SHA-256 digest of the file at `filePath`.
 *
 * @returns the digest as a lowercase hex string (the implementation sets
 *   `hash.setEncoding('hex')` before reading), so the type is narrowed from
 *   `Promise<any>` to `Promise<string>` — backward compatible for all callers.
 */
export declare const sha: (filePath: string) => Promise<string>;
|
||||
12
desktop-operator/node_modules/@electron/universal/dist/esm/sha.js
generated
vendored
Normal file
12
desktop-operator/node_modules/@electron/universal/dist/esm/sha.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
import * as fs from 'fs-extra';
|
||||
import * as crypto from 'crypto';
|
||||
import { pipeline } from 'stream/promises';
|
||||
import { d } from './debug';
|
||||
/**
 * Hash the file at `filePath` with SHA-256 and return the hex digest.
 * Streams the file through the hash so large files are never fully buffered.
 */
export const sha = async (filePath) => {
    d('hashing', filePath);
    // Hash configured to yield a hex string when read after the stream ends.
    const hasher = crypto.createHash('sha256');
    hasher.setEncoding('hex');
    const input = fs.createReadStream(filePath);
    await pipeline(input, hasher);
    return hasher.read();
};
|
||||
//# sourceMappingURL=sha.js.map
|
||||
1
desktop-operator/node_modules/@electron/universal/dist/esm/sha.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/dist/esm/sha.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"sha.js","sourceRoot":"","sources":["../../src/sha.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,UAAU,CAAC;AAC/B,OAAO,KAAK,MAAM,MAAM,QAAQ,CAAC;AACjC,OAAO,EAAE,QAAQ,EAAE,MAAM,iBAAiB,CAAC;AAE3C,OAAO,EAAE,CAAC,EAAE,MAAM,SAAS,CAAC;AAE5B,MAAM,CAAC,MAAM,GAAG,GAAG,KAAK,EAAE,QAAgB,EAAE,EAAE;IAC5C,CAAC,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;IACvB,MAAM,IAAI,GAAG,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC;IACzC,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,CAAC;IACxB,MAAM,QAAQ,CAAC,EAAE,CAAC,gBAAgB,CAAC,QAAQ,CAAC,EAAE,IAAI,CAAC,CAAC;IACpD,OAAO,IAAI,CAAC,IAAI,EAAE,CAAC;AACrB,CAAC,CAAC"}
|
||||
28
desktop-operator/node_modules/@electron/universal/entry-asar/has-asar.js
generated
vendored
Normal file
28
desktop-operator/node_modules/@electron/universal/entry-asar/has-asar.js
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const electron_1 = require("electron");
|
||||
const path_1 = __importDefault(require("path"));
|
||||
if (process.arch === 'arm64') {
|
||||
setPaths('arm64');
|
||||
}
|
||||
else {
|
||||
setPaths('x64');
|
||||
}
|
||||
function setPaths(platform) {
|
||||
// This should return the full path, ending in something like
|
||||
// Notion.app/Contents/Resources/app.asar
|
||||
const appPath = electron_1.app.getAppPath();
|
||||
const asarFile = `app-${platform}.asar`;
|
||||
// Maybe we'll handle this in Electron one day
|
||||
if (path_1.default.basename(appPath) === 'app.asar') {
|
||||
const platformAppPath = path_1.default.join(path_1.default.dirname(appPath), asarFile);
|
||||
// This is an undocumented API. It exists.
|
||||
electron_1.app.setAppPath(platformAppPath);
|
||||
}
|
||||
process._archPath = require.resolve(`../${asarFile}`);
|
||||
}
|
||||
require(process._archPath);
|
||||
//# sourceMappingURL=has-asar.js.map
|
||||
1
desktop-operator/node_modules/@electron/universal/entry-asar/has-asar.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/entry-asar/has-asar.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"has-asar.js","sourceRoot":"","sources":["has-asar.ts"],"names":[],"mappings":";;;;;AAAA,uCAA+B;AAC/B,gDAAwB;AAExB,IAAI,OAAO,CAAC,IAAI,KAAK,OAAO,EAAE;IAC5B,QAAQ,CAAC,OAAO,CAAC,CAAC;CACnB;KAAM;IACL,QAAQ,CAAC,KAAK,CAAC,CAAC;CACjB;AAED,SAAS,QAAQ,CAAC,QAAgB;IAChC,6DAA6D;IAC7D,yCAAyC;IACzC,MAAM,OAAO,GAAG,cAAG,CAAC,UAAU,EAAE,CAAC;IACjC,MAAM,QAAQ,GAAG,OAAO,QAAQ,OAAO,CAAC;IAExC,8CAA8C;IAC9C,IAAI,cAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,KAAK,UAAU,EAAE;QACzC,MAAM,eAAe,GAAG,cAAI,CAAC,IAAI,CAAC,cAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,QAAQ,CAAC,CAAC;QAEnE,0CAA0C;QAC1C,cAAG,CAAC,UAAU,CAAC,eAAe,CAAC,CAAC;KACjC;IAED,OAAO,CAAC,SAAS,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,QAAQ,EAAE,CAAC,CAAC;AACxD,CAAC;AAED,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC"}
|
||||
28
desktop-operator/node_modules/@electron/universal/entry-asar/no-asar.js
generated
vendored
Normal file
28
desktop-operator/node_modules/@electron/universal/entry-asar/no-asar.js
generated
vendored
Normal file
@@ -0,0 +1,28 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const electron_1 = require("electron");
|
||||
const path_1 = __importDefault(require("path"));
|
||||
if (process.arch === 'arm64') {
|
||||
setPaths('arm64');
|
||||
}
|
||||
else {
|
||||
setPaths('x64');
|
||||
}
|
||||
function setPaths(platform) {
|
||||
// This should return the full path, ending in something like
|
||||
// Notion.app/Contents/Resources/app
|
||||
const appPath = electron_1.app.getAppPath();
|
||||
const appFolder = `app-${platform}`;
|
||||
// Maybe we'll handle this in Electron one day
|
||||
if (path_1.default.basename(appPath) === 'app') {
|
||||
const platformAppPath = path_1.default.join(path_1.default.dirname(appPath), appFolder);
|
||||
// This is an undocumented private API. It exists.
|
||||
electron_1.app.setAppPath(platformAppPath);
|
||||
}
|
||||
process._archPath = require.resolve(`../${appFolder}`);
|
||||
}
|
||||
require(process._archPath);
|
||||
//# sourceMappingURL=no-asar.js.map
|
||||
1
desktop-operator/node_modules/@electron/universal/entry-asar/no-asar.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/entry-asar/no-asar.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"no-asar.js","sourceRoot":"","sources":["no-asar.ts"],"names":[],"mappings":";;;;;AAAA,uCAA+B;AAC/B,gDAAwB;AAExB,IAAI,OAAO,CAAC,IAAI,KAAK,OAAO,EAAE;IAC5B,QAAQ,CAAC,OAAO,CAAC,CAAC;CACnB;KAAM;IACL,QAAQ,CAAC,KAAK,CAAC,CAAC;CACjB;AAED,SAAS,QAAQ,CAAC,QAAgB;IAChC,6DAA6D;IAC7D,oCAAoC;IACpC,MAAM,OAAO,GAAG,cAAG,CAAC,UAAU,EAAE,CAAC;IACjC,MAAM,SAAS,GAAG,OAAO,QAAQ,EAAE,CAAC;IAEpC,8CAA8C;IAC9C,IAAI,cAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,KAAK,KAAK,EAAE;QACpC,MAAM,eAAe,GAAG,cAAI,CAAC,IAAI,CAAC,cAAI,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,SAAS,CAAC,CAAC;QAEpE,kDAAkD;QAClD,cAAG,CAAC,UAAU,CAAC,eAAe,CAAC,CAAC;KACjC;IAED,OAAO,CAAC,SAAS,GAAG,OAAO,CAAC,OAAO,CAAC,MAAM,SAAS,EAAE,CAAC,CAAC;AACzD,CAAC;AAED,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC"}
|
||||
2
desktop-operator/node_modules/@electron/universal/node_modules/brace-expansion/.github/FUNDING.yml
generated
vendored
Normal file
2
desktop-operator/node_modules/@electron/universal/node_modules/brace-expansion/.github/FUNDING.yml
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
tidelift: "npm/brace-expansion"
|
||||
patreon: juliangruber
|
||||
21
desktop-operator/node_modules/@electron/universal/node_modules/brace-expansion/LICENSE
generated
vendored
Normal file
21
desktop-operator/node_modules/@electron/universal/node_modules/brace-expansion/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
135
desktop-operator/node_modules/@electron/universal/node_modules/brace-expansion/README.md
generated
vendored
Normal file
135
desktop-operator/node_modules/@electron/universal/node_modules/brace-expansion/README.md
generated
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
# brace-expansion
|
||||
|
||||
[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html),
|
||||
as known from sh/bash, in JavaScript.
|
||||
|
||||
[](http://travis-ci.org/juliangruber/brace-expansion)
|
||||
[](https://www.npmjs.org/package/brace-expansion)
|
||||
[](https://greenkeeper.io/)
|
||||
|
||||
[](https://ci.testling.com/juliangruber/brace-expansion)
|
||||
|
||||
## Example
|
||||
|
||||
```js
|
||||
var expand = require('brace-expansion');
|
||||
|
||||
expand('file-{a,b,c}.jpg')
|
||||
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
|
||||
|
||||
expand('-v{,,}')
|
||||
// => ['-v', '-v', '-v']
|
||||
|
||||
expand('file{0..2}.jpg')
|
||||
// => ['file0.jpg', 'file1.jpg', 'file2.jpg']
|
||||
|
||||
expand('file-{a..c}.jpg')
|
||||
// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg']
|
||||
|
||||
expand('file{2..0}.jpg')
|
||||
// => ['file2.jpg', 'file1.jpg', 'file0.jpg']
|
||||
|
||||
expand('file{0..4..2}.jpg')
|
||||
// => ['file0.jpg', 'file2.jpg', 'file4.jpg']
|
||||
|
||||
expand('file-{a..e..2}.jpg')
|
||||
// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg']
|
||||
|
||||
expand('file{00..10..5}.jpg')
|
||||
// => ['file00.jpg', 'file05.jpg', 'file10.jpg']
|
||||
|
||||
expand('{{A..C},{a..c}}')
|
||||
// => ['A', 'B', 'C', 'a', 'b', 'c']
|
||||
|
||||
expand('ppp{,config,oe{,conf}}')
|
||||
// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf']
|
||||
```
|
||||
|
||||
## API
|
||||
|
||||
```js
|
||||
var expand = require('brace-expansion');
|
||||
```
|
||||
|
||||
### var expanded = expand(str)
|
||||
|
||||
Return an array of all possible and valid expansions of `str`. If none are
|
||||
found, `[str]` is returned.
|
||||
|
||||
Valid expansions are:
|
||||
|
||||
```js
|
||||
/^(.*,)+(.+)?$/
|
||||
// {a,b,...}
|
||||
```
|
||||
|
||||
A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`.
|
||||
|
||||
```js
|
||||
/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/
|
||||
// {x..y[..incr]}
|
||||
```
|
||||
|
||||
A numeric sequence from `x` to `y` inclusive, with optional increment.
|
||||
If `x` or `y` start with a leading `0`, all the numbers will be padded
|
||||
to have equal length. Negative numbers and backwards iteration work too.
|
||||
|
||||
```js
|
||||
/^[a-zA-Z]\.\.[a-zA-Z](\.\.-?\d+)?$/
|
||||
// {x..y[..incr]}
|
||||
```
|
||||
|
||||
An alphabetic sequence from `x` to `y` inclusive, with optional increment.
|
||||
`x` and `y` must be exactly one character, and if given, `incr` must be a
|
||||
number.
|
||||
|
||||
For compatibility reasons, the string `${` is not eligible for brace expansion.
|
||||
|
||||
## Installation
|
||||
|
||||
With [npm](https://npmjs.org) do:
|
||||
|
||||
```bash
|
||||
npm install brace-expansion
|
||||
```
|
||||
|
||||
## Contributors
|
||||
|
||||
- [Julian Gruber](https://github.com/juliangruber)
|
||||
- [Isaac Z. Schlueter](https://github.com/isaacs)
|
||||
|
||||
## Sponsors
|
||||
|
||||
This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)!
|
||||
|
||||
Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)!
|
||||
|
||||
## Security contact information
|
||||
|
||||
To report a security vulnerability, please use the
|
||||
[Tidelift security contact](https://tidelift.com/security).
|
||||
Tidelift will coordinate the fix and disclosure.
|
||||
|
||||
## License
|
||||
|
||||
(MIT)
|
||||
|
||||
Copyright (c) 2013 Julian Gruber <julian@juliangruber.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
|
||||
of the Software, and to permit persons to whom the Software is furnished to do
|
||||
so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
203
desktop-operator/node_modules/@electron/universal/node_modules/brace-expansion/index.js
generated
vendored
Normal file
203
desktop-operator/node_modules/@electron/universal/node_modules/brace-expansion/index.js
generated
vendored
Normal file
@@ -0,0 +1,203 @@
|
||||
var balanced = require('balanced-match');

module.exports = expandTop;

// Unique sentinel strings used to hide backslash-escaped characters from the
// expansion logic; the Math.random() component makes a collision with real
// user input vanishingly unlikely. escapeBraces/unescapeBraces translate
// to and from these sentinels.
var escSlash = '\0SLASH'+Math.random()+'\0';
var escOpen = '\0OPEN'+Math.random()+'\0';
var escClose = '\0CLOSE'+Math.random()+'\0';
var escComma = '\0COMMA'+Math.random()+'\0';
var escPeriod = '\0PERIOD'+Math.random()+'\0';
|
||||
|
||||
// Interpret a sequence endpoint: a decimal string becomes its number,
// anything else (e.g. a letter) becomes its first character code.
function numeric(str) {
  var parsed = parseInt(str, 10);
  // Loose equality deliberately coerces: '08' == 8 holds, 'a' == NaN does not.
  return parsed == str ? parsed : str.charCodeAt(0);
}
|
||||
|
||||
// Replace backslash-escaped metacharacters with private sentinels so the
// expansion logic never mistakes them for structure.
function escapeBraces(str) {
  var result = str;
  result = result.split('\\\\').join(escSlash);
  result = result.split('\\{').join(escOpen);
  result = result.split('\\}').join(escClose);
  result = result.split('\\,').join(escComma);
  result = result.split('\\.').join(escPeriod);
  return result;
}
|
||||
|
||||
// Inverse of escapeBraces: turn the private sentinels back into the literal
// characters the user originally escaped.
function unescapeBraces(str) {
  var result = str;
  result = result.split(escSlash).join('\\');
  result = result.split(escOpen).join('{');
  result = result.split(escClose).join('}');
  result = result.split(escComma).join(',');
  result = result.split(escPeriod).join('.');
  return result;
}
|
||||
|
||||
|
||||
// Basically just str.split(","), but handling cases
|
||||
// where we have nested braced sections, which should be
|
||||
// treated as individual members, like {a,{b,c},d}
|
||||
// Basically str.split(','), except that a nested braced section counts as a
// single member, so '{a,{b,c},d}' yields ['a', '{b,c}', 'd'].
function parseCommaParts(str) {
  if (!str)
    return [''];

  var match = balanced('{', '}', str);

  // No braced section at all: a plain comma split is enough.
  if (!match)
    return str.split(',');

  var head = match.pre.split(',');

  // The braced body belongs to whichever element immediately precedes it.
  head[head.length - 1] += '{' + match.body + '}';

  // Recurse on the remainder and glue its first piece onto the same element.
  var tailParts = parseCommaParts(match.post);
  if (match.post.length) {
    head[head.length - 1] += tailParts.shift();
    head.push.apply(head, tailParts);
  }

  return head;
}
|
||||
|
||||
// Public entry point: expand a brace pattern into all of its results.
function expandTop(str) {
  if (!str)
    return [];

  // I don't know why Bash 4.3 does this, but it does.
  // Anything starting with {} will have the first two bytes preserved
  // but *only* at the top level, so {},a}b will not expand to anything,
  // but a{},b}c will be expanded to [a}c,abc].
  // One could argue that this is a bug in Bash, but since the goal of
  // this module is to match Bash's rules, we escape a leading {}
  if (str.slice(0, 2) === '{}') {
    str = '\\{\\}' + str.slice(2);
  }

  var escaped = escapeBraces(str);
  return expand(escaped, true).map(unescapeBraces);
}
|
||||
|
||||
// Re-wrap a string in a literal brace pair.
function embrace(str) {
  return ['{', str, '}'].join('');
}
|
||||
// True when a sequence endpoint has a leading zero (e.g. '01' or '-05'),
// which makes the whole sequence zero-padded to equal width.
function isPadded(el) {
  var leadingZero = /^-?0\d/;
  return leadingZero.test(el);
}
|
||||
|
||||
// Loop-continuation predicates for ascending and descending sequences.
function lte(i, y) {
  var withinUpperBound = i <= y;
  return withinUpperBound;
}
function gte(i, y) {
  var withinLowerBound = i >= y;
  return withinLowerBound;
}
|
||||
|
||||
// Core recursive expansion. `isTop` marks the outermost call, which — to
// match Bash — drops empty results produced by pure comma-sets at the top
// level (see the `!isTop || isSequence || expansion` filter at the bottom).
function expand(str, isTop) {
  var expansions = [];

  var m = balanced('{', '}', str);
  // No balanced brace pair left: the string expands only to itself.
  if (!m) return [str];

  // no need to expand pre, since it is guaranteed to be free of brace-sets
  var pre = m.pre;
  var post = m.post.length
    ? expand(m.post, false)
    : [''];

  if (/\$$/.test(m.pre)) {
    // A "${...}" is reserved for shell parameter expansion: keep the braces
    // literally and only combine with the expanded tails.
    for (var k = 0; k < post.length; k++) {
      var expansion = pre + '{' + m.body + '}' + post[k];
      expansions.push(expansion);
    }
  } else {
    var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
    var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
    var isSequence = isNumericSequence || isAlphaSequence;
    var isOptions = m.body.indexOf(',') >= 0;
    if (!isSequence && !isOptions) {
      // {a},b}
      if (m.post.match(/,(?!,).*\}/)) {
        // The matched closing brace belonged to a later comma-set: re-escape
        // it with the sentinel and reparse the whole string.
        str = m.pre + '{' + m.body + escClose + m.post;
        return expand(str);
      }
      return [str];
    }

    var n;
    if (isSequence) {
      n = m.body.split(/\.\./);
    } else {
      n = parseCommaParts(m.body);
      if (n.length === 1) {
        // x{{a,b}}y ==> x{a}y x{b}y
        n = expand(n[0], false).map(embrace);
        if (n.length === 1) {
          return post.map(function(p) {
            return m.pre + n[0] + p;
          });
        }
      }
    }

    // at this point, n is the parts, and we know it's not a comma set
    // with a single entry.
    var N;

    if (isSequence) {
      var x = numeric(n[0]);
      var y = numeric(n[1]);
      // Pad width comes from the longer endpoint, e.g. {01..10} pads to 2.
      var width = Math.max(n[0].length, n[1].length)
      var incr = n.length == 3
        ? Math.abs(numeric(n[2]))
        : 1;
      var test = lte;
      var reverse = y < x;
      if (reverse) {
        // Descending sequence: negate the step and flip the loop predicate.
        incr *= -1;
        test = gte;
      }
      var pad = n.some(isPadded);

      N = [];

      for (var i = x; test(i, y); i += incr) {
        var c;
        if (isAlphaSequence) {
          c = String.fromCharCode(i);
          // A raw backslash (char code 92) is replaced with an empty string
          // so sequences can never emit an escape character.
          if (c === '\\')
            c = '';
        } else {
          c = String(i);
          if (pad) {
            // Zero-pad to the endpoint width, keeping a leading minus sign.
            var need = width - c.length;
            if (need > 0) {
              var z = new Array(need + 1).join('0');
              if (i < 0)
                c = '-' + z + c.slice(1);
              else
                c = z + c;
            }
          }
        }
        N.push(c);
      }
    } else {
      // Comma-set: expand every member recursively and concatenate.
      N = [];

      for (var j = 0; j < n.length; j++) {
        N.push.apply(N, expand(n[j], false));
      }
    }

    // Cross product of every middle part with every expanded tail.
    for (var j = 0; j < N.length; j++) {
      for (var k = 0; k < post.length; k++) {
        var expansion = pre + N[j] + post[k];
        if (!isTop || isSequence || expansion)
          expansions.push(expansion);
      }
    }
  }

  return expansions;
}
|
||||
|
||||
49
desktop-operator/node_modules/@electron/universal/node_modules/brace-expansion/package.json
generated
vendored
Normal file
49
desktop-operator/node_modules/@electron/universal/node_modules/brace-expansion/package.json
generated
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
{
|
||||
"name": "brace-expansion",
|
||||
"description": "Brace expansion as known from sh/bash",
|
||||
"version": "2.0.2",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git://github.com/juliangruber/brace-expansion.git"
|
||||
},
|
||||
"homepage": "https://github.com/juliangruber/brace-expansion",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "tape test/*.js",
|
||||
"gentest": "bash test/generate.sh",
|
||||
"bench": "matcha test/perf/bench.js"
|
||||
},
|
||||
"dependencies": {
|
||||
"balanced-match": "^1.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@c4312/matcha": "^1.3.1",
|
||||
"tape": "^4.6.0"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": {
|
||||
"name": "Julian Gruber",
|
||||
"email": "mail@juliangruber.com",
|
||||
"url": "http://juliangruber.com"
|
||||
},
|
||||
"license": "MIT",
|
||||
"testling": {
|
||||
"files": "test/*.js",
|
||||
"browsers": [
|
||||
"ie/8..latest",
|
||||
"firefox/20..latest",
|
||||
"firefox/nightly",
|
||||
"chrome/25..latest",
|
||||
"chrome/canary",
|
||||
"opera/12..latest",
|
||||
"opera/next",
|
||||
"safari/5.1..latest",
|
||||
"ipad/6.0..latest",
|
||||
"iphone/6.0..latest",
|
||||
"android-browser/4.2..latest"
|
||||
]
|
||||
},
|
||||
"publishConfig": {
|
||||
"tag": "2.x"
|
||||
}
|
||||
}
|
||||
15
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/LICENSE
generated
vendored
Normal file
15
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
(The MIT License)
|
||||
|
||||
Copyright (c) 2011-2024 JP Richardson
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
|
||||
(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
|
||||
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
|
||||
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
|
||||
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
||||
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
294
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/README.md
generated
vendored
Normal file
294
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/README.md
generated
vendored
Normal file
@@ -0,0 +1,294 @@
|
||||
Node.js: fs-extra
|
||||
=================
|
||||
|
||||
`fs-extra` adds file system methods that aren't included in the native `fs` module and adds promise support to the `fs` methods. It also uses [`graceful-fs`](https://github.com/isaacs/node-graceful-fs) to prevent `EMFILE` errors. It should be a drop in replacement for `fs`.
|
||||
|
||||
[](https://www.npmjs.org/package/fs-extra)
|
||||
[](https://github.com/jprichardson/node-fs-extra/blob/master/LICENSE)
|
||||
[](https://github.com/jprichardson/node-fs-extra/actions/workflows/ci.yml?query=branch%3Amaster)
|
||||
[](https://www.npmjs.org/package/fs-extra)
|
||||
[](https://standardjs.com)
|
||||
|
||||
Why?
|
||||
----
|
||||
|
||||
I got tired of including `mkdirp`, `rimraf`, and `ncp` in most of my projects.
|
||||
|
||||
|
||||
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
npm install fs-extra
|
||||
|
||||
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
### CommonJS
|
||||
|
||||
`fs-extra` is a drop in replacement for native `fs`. All methods in `fs` are attached to `fs-extra`. All `fs` methods return promises if the callback isn't passed.
|
||||
|
||||
You don't ever need to include the original `fs` module again:
|
||||
|
||||
```js
|
||||
const fs = require('fs') // this is no longer necessary
|
||||
```
|
||||
|
||||
you can now do this:
|
||||
|
||||
```js
|
||||
const fs = require('fs-extra')
|
||||
```
|
||||
|
||||
or if you prefer to make it clear that you're using `fs-extra` and not `fs`, you may want
|
||||
to name your `fs` variable `fse` like so:
|
||||
|
||||
```js
|
||||
const fse = require('fs-extra')
|
||||
```
|
||||
|
||||
you can also keep both, but it's redundant:
|
||||
|
||||
```js
|
||||
const fs = require('fs')
|
||||
const fse = require('fs-extra')
|
||||
```
|
||||
|
||||
**NOTE:** The deprecated constants `fs.F_OK`, `fs.R_OK`, `fs.W_OK`, & `fs.X_OK` are not exported on Node.js v24.0.0+; please use their `fs.constants` equivalents.
|
||||
|
||||
### ESM
|
||||
|
||||
There is also an `fs-extra/esm` import, that supports both default and named exports. However, note that `fs` methods are not included in `fs-extra/esm`; you still need to import `fs` and/or `fs/promises` separately:
|
||||
|
||||
```js
|
||||
import { readFileSync } from 'fs'
|
||||
import { readFile } from 'fs/promises'
|
||||
import { outputFile, outputFileSync } from 'fs-extra/esm'
|
||||
```
|
||||
|
||||
Default exports are supported:
|
||||
|
||||
```js
|
||||
import fs from 'fs'
|
||||
import fse from 'fs-extra/esm'
|
||||
// fse.readFileSync is not a function; must use fs.readFileSync
|
||||
```
|
||||
|
||||
but you probably want to just use regular `fs-extra` instead of `fs-extra/esm` for default exports:
|
||||
|
||||
```js
|
||||
import fs from 'fs-extra'
|
||||
// both fs and fs-extra methods are defined
|
||||
```
|
||||
|
||||
Sync vs Async vs Async/Await
|
||||
-------------
|
||||
Most methods are async by default. All async methods will return a promise if the callback isn't passed.
|
||||
|
||||
Sync methods on the other hand will throw if an error occurs.
|
||||
|
||||
Also Async/Await will throw an error if one occurs.
|
||||
|
||||
Example:
|
||||
|
||||
```js
|
||||
const fs = require('fs-extra')
|
||||
|
||||
// Async with promises:
|
||||
fs.copy('/tmp/myfile', '/tmp/mynewfile')
|
||||
.then(() => console.log('success!'))
|
||||
.catch(err => console.error(err))
|
||||
|
||||
// Async with callbacks:
|
||||
fs.copy('/tmp/myfile', '/tmp/mynewfile', err => {
|
||||
if (err) return console.error(err)
|
||||
console.log('success!')
|
||||
})
|
||||
|
||||
// Sync:
|
||||
try {
|
||||
fs.copySync('/tmp/myfile', '/tmp/mynewfile')
|
||||
console.log('success!')
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
}
|
||||
|
||||
// Async/Await:
|
||||
async function copyFiles () {
|
||||
try {
|
||||
await fs.copy('/tmp/myfile', '/tmp/mynewfile')
|
||||
console.log('success!')
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
}
|
||||
}
|
||||
|
||||
copyFiles()
|
||||
```
|
||||
|
||||
|
||||
Methods
|
||||
-------
|
||||
|
||||
### Async
|
||||
|
||||
- [copy](docs/copy.md)
|
||||
- [emptyDir](docs/emptyDir.md)
|
||||
- [ensureFile](docs/ensureFile.md)
|
||||
- [ensureDir](docs/ensureDir.md)
|
||||
- [ensureLink](docs/ensureLink.md)
|
||||
- [ensureSymlink](docs/ensureSymlink.md)
|
||||
- [mkdirp](docs/ensureDir.md)
|
||||
- [mkdirs](docs/ensureDir.md)
|
||||
- [move](docs/move.md)
|
||||
- [outputFile](docs/outputFile.md)
|
||||
- [outputJson](docs/outputJson.md)
|
||||
- [pathExists](docs/pathExists.md)
|
||||
- [readJson](docs/readJson.md)
|
||||
- [remove](docs/remove.md)
|
||||
- [writeJson](docs/writeJson.md)
|
||||
|
||||
### Sync
|
||||
|
||||
- [copySync](docs/copy-sync.md)
|
||||
- [emptyDirSync](docs/emptyDir-sync.md)
|
||||
- [ensureFileSync](docs/ensureFile-sync.md)
|
||||
- [ensureDirSync](docs/ensureDir-sync.md)
|
||||
- [ensureLinkSync](docs/ensureLink-sync.md)
|
||||
- [ensureSymlinkSync](docs/ensureSymlink-sync.md)
|
||||
- [mkdirpSync](docs/ensureDir-sync.md)
|
||||
- [mkdirsSync](docs/ensureDir-sync.md)
|
||||
- [moveSync](docs/move-sync.md)
|
||||
- [outputFileSync](docs/outputFile-sync.md)
|
||||
- [outputJsonSync](docs/outputJson-sync.md)
|
||||
- [pathExistsSync](docs/pathExists-sync.md)
|
||||
- [readJsonSync](docs/readJson-sync.md)
|
||||
- [removeSync](docs/remove-sync.md)
|
||||
- [writeJsonSync](docs/writeJson-sync.md)
|
||||
|
||||
|
||||
**NOTE:** You can still use the native Node.js methods. They are promisified and copied over to `fs-extra`. See [notes on `fs.read()`, `fs.write()`, & `fs.writev()`](docs/fs-read-write-writev.md)
|
||||
|
||||
### What happened to `walk()` and `walkSync()`?
|
||||
|
||||
They were removed from `fs-extra` in v2.0.0. If you need the functionality, `walk` and `walkSync` are available as separate packages, [`klaw`](https://github.com/jprichardson/node-klaw) and [`klaw-sync`](https://github.com/manidlou/node-klaw-sync).
|
||||
|
||||
|
||||
Third Party
|
||||
-----------
|
||||
|
||||
### CLI
|
||||
|
||||
[fse-cli](https://www.npmjs.com/package/@atao60/fse-cli) allows you to run `fs-extra` from a console or from [npm](https://www.npmjs.com) scripts.
|
||||
|
||||
### TypeScript
|
||||
|
||||
If you like TypeScript, you can use `fs-extra` with it: https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/fs-extra
|
||||
|
||||
|
||||
### File / Directory Watching
|
||||
|
||||
If you want to watch for changes to files or directories, then you should use [chokidar](https://github.com/paulmillr/chokidar).
|
||||
|
||||
### Obtain Filesystem (Devices, Partitions) Information
|
||||
|
||||
[fs-filesystem](https://github.com/arthurintelligence/node-fs-filesystem) allows you to read the state of the filesystem of the host on which it is run. It returns information about both the devices and the partitions (volumes) of the system.
|
||||
|
||||
### Misc.
|
||||
|
||||
- [fs-extra-debug](https://github.com/jdxcode/fs-extra-debug) - Send your fs-extra calls to [debug](https://npmjs.org/package/debug).
|
||||
- [mfs](https://github.com/cadorn/mfs) - Monitor your fs-extra calls.
|
||||
|
||||
|
||||
|
||||
Hacking on fs-extra
|
||||
-------------------
|
||||
|
||||
Wanna hack on `fs-extra`? Great! Your help is needed! [fs-extra is one of the most depended upon Node.js packages](http://nodei.co/npm/fs-extra.png?downloads=true&downloadRank=true&stars=true). This project
|
||||
uses [JavaScript Standard Style](https://github.com/feross/standard) - if the name or style choices bother you,
|
||||
you're gonna have to get over it :) If `standard` is good enough for `npm`, it's good enough for `fs-extra`.
|
||||
|
||||
[](https://github.com/feross/standard)
|
||||
|
||||
What's needed?
|
||||
- First, take a look at existing issues. Those are probably going to be where the priority lies.
|
||||
- More tests for edge cases. Specifically on different platforms. There can never be enough tests.
|
||||
- Improve test coverage.
|
||||
|
||||
Note: If you make any big changes, **you should definitely file an issue for discussion first.**
|
||||
|
||||
### Running the Test Suite
|
||||
|
||||
fs-extra contains hundreds of tests.
|
||||
|
||||
- `npm run lint`: runs the linter ([standard](http://standardjs.com/))
|
||||
- `npm run unit`: runs the unit tests
|
||||
- `npm run unit-esm`: runs tests for `fs-extra/esm` exports
|
||||
- `npm test`: runs the linter and all tests
|
||||
|
||||
When running unit tests, set the environment variable `CROSS_DEVICE_PATH` to the absolute path of an empty directory on another device (like a thumb drive) to enable cross-device move tests.
|
||||
|
||||
|
||||
### Windows
|
||||
|
||||
If you run the tests on the Windows and receive a lot of symbolic link `EPERM` permission errors, it's
|
||||
because on Windows you need elevated privilege to create symbolic links. You can add this to your Windows's
|
||||
account by following the instructions here: http://superuser.com/questions/104845/permission-to-make-symbolic-links-in-windows-7
|
||||
However, I didn't have much luck doing this.
|
||||
|
||||
Since I develop on Mac OS X, I use VMWare Fusion for Windows testing. I create a shared folder that I map to a drive on Windows.
|
||||
I open the `Node.js command prompt` and run as `Administrator`. I then map the network drive running the following command:
|
||||
|
||||
net use z: "\\vmware-host\Shared Folders"
|
||||
|
||||
I can then navigate to my `fs-extra` directory and run the tests.
|
||||
|
||||
|
||||
Naming
|
||||
------
|
||||
|
||||
I put a lot of thought into the naming of these functions. Inspired by @coolaj86's request. So he deserves much of the credit for raising the issue. See discussion(s) here:
|
||||
|
||||
* https://github.com/jprichardson/node-fs-extra/issues/2
|
||||
* https://github.com/flatiron/utile/issues/11
|
||||
* https://github.com/ryanmcgrath/wrench-js/issues/29
|
||||
* https://github.com/substack/node-mkdirp/issues/17
|
||||
|
||||
First, I believe that in as many cases as possible, the [Node.js naming schemes](http://nodejs.org/api/fs.html) should be chosen. However, there are problems with the Node.js own naming schemes.
|
||||
|
||||
For example, `fs.readFile()` and `fs.readdir()`: the **F** is capitalized in *File* and the **d** is not capitalized in *dir*. Perhaps a bit pedantic, but they should still be consistent. Also, Node.js has chosen a lot of POSIX naming schemes, which I believe is great. See: `fs.mkdir()`, `fs.rmdir()`, `fs.chown()`, etc.
|
||||
|
||||
We have a dilemma though. How do you consistently name methods that perform the following POSIX commands: `cp`, `cp -r`, `mkdir -p`, and `rm -rf`?
|
||||
|
||||
My perspective: when in doubt, err on the side of simplicity. A directory is just a hierarchical grouping of directories and files. Consider that for a moment. So when you want to copy it or remove it, in most cases you'll want to copy or remove all of its contents. When you want to create a directory, if the directory that it's suppose to be contained in does not exist, then in most cases you'll want to create that too.
|
||||
|
||||
So, if you want to remove a file or a directory regardless of whether it has contents, just call `fs.remove(path)`. If you want to copy a file or a directory whether it has contents, just call `fs.copy(source, destination)`. If you want to create a directory regardless of whether its parent directories exist, just call `fs.mkdirs(path)` or `fs.mkdirp(path)`.
|
||||
|
||||
|
||||
Credit
|
||||
------
|
||||
|
||||
`fs-extra` wouldn't be possible without using the modules from the following authors:
|
||||
|
||||
- [Isaac Shlueter](https://github.com/isaacs)
|
||||
- [Charlie McConnel](https://github.com/avianflu)
|
||||
- [James Halliday](https://github.com/substack)
|
||||
- [Andrew Kelley](https://github.com/andrewrk)
|
||||
|
||||
|
||||
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
Licensed under MIT
|
||||
|
||||
Copyright (c) 2011-2024 [JP Richardson](https://github.com/jprichardson)
|
||||
|
||||
[1]: http://nodejs.org/docs/latest/api/fs.html
|
||||
|
||||
|
||||
[jsonfile]: https://github.com/jprichardson/node-jsonfile
|
||||
171
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/copy/copy-sync.js
generated
vendored
Normal file
171
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/copy/copy-sync.js
generated
vendored
Normal file
@@ -0,0 +1,171 @@
|
||||
'use strict'
|
||||
|
||||
const fs = require('graceful-fs')
|
||||
const path = require('path')
|
||||
const mkdirsSync = require('../mkdirs').mkdirsSync
|
||||
const utimesMillisSync = require('../util/utimes').utimesMillisSync
|
||||
const stat = require('../util/stat')
|
||||
|
||||
function copySync (src, dest, opts) {
|
||||
if (typeof opts === 'function') {
|
||||
opts = { filter: opts }
|
||||
}
|
||||
|
||||
opts = opts || {}
|
||||
opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now
|
||||
opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber
|
||||
|
||||
// Warn about using preserveTimestamps on 32-bit node
|
||||
if (opts.preserveTimestamps && process.arch === 'ia32') {
|
||||
process.emitWarning(
|
||||
'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' +
|
||||
'\tsee https://github.com/jprichardson/node-fs-extra/issues/269',
|
||||
'Warning', 'fs-extra-WARN0002'
|
||||
)
|
||||
}
|
||||
|
||||
const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy', opts)
|
||||
stat.checkParentPathsSync(src, srcStat, dest, 'copy')
|
||||
if (opts.filter && !opts.filter(src, dest)) return
|
||||
const destParent = path.dirname(dest)
|
||||
if (!fs.existsSync(destParent)) mkdirsSync(destParent)
|
||||
return getStats(destStat, src, dest, opts)
|
||||
}
|
||||
|
||||
function getStats (destStat, src, dest, opts) {
|
||||
const statSync = opts.dereference ? fs.statSync : fs.lstatSync
|
||||
const srcStat = statSync(src)
|
||||
|
||||
if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts)
|
||||
else if (srcStat.isFile() ||
|
||||
srcStat.isCharacterDevice() ||
|
||||
srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts)
|
||||
else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts)
|
||||
else if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`)
|
||||
else if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`)
|
||||
throw new Error(`Unknown file: ${src}`)
|
||||
}
|
||||
|
||||
function onFile (srcStat, destStat, src, dest, opts) {
|
||||
if (!destStat) return copyFile(srcStat, src, dest, opts)
|
||||
return mayCopyFile(srcStat, src, dest, opts)
|
||||
}
|
||||
|
||||
function mayCopyFile (srcStat, src, dest, opts) {
|
||||
if (opts.overwrite) {
|
||||
fs.unlinkSync(dest)
|
||||
return copyFile(srcStat, src, dest, opts)
|
||||
} else if (opts.errorOnExist) {
|
||||
throw new Error(`'${dest}' already exists`)
|
||||
}
|
||||
}
|
||||
|
||||
function copyFile (srcStat, src, dest, opts) {
|
||||
fs.copyFileSync(src, dest)
|
||||
if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest)
|
||||
return setDestMode(dest, srcStat.mode)
|
||||
}
|
||||
|
||||
function handleTimestamps (srcMode, src, dest) {
|
||||
// Make sure the file is writable before setting the timestamp
|
||||
// otherwise open fails with EPERM when invoked with 'r+'
|
||||
// (through utimes call)
|
||||
if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode)
|
||||
return setDestTimestamps(src, dest)
|
||||
}
|
||||
|
||||
function fileIsNotWritable (srcMode) {
|
||||
return (srcMode & 0o200) === 0
|
||||
}
|
||||
|
||||
function makeFileWritable (dest, srcMode) {
|
||||
return setDestMode(dest, srcMode | 0o200)
|
||||
}
|
||||
|
||||
function setDestMode (dest, srcMode) {
|
||||
return fs.chmodSync(dest, srcMode)
|
||||
}
|
||||
|
||||
function setDestTimestamps (src, dest) {
|
||||
// The initial srcStat.atime cannot be trusted
|
||||
// because it is modified by the read(2) system call
|
||||
// (See https://nodejs.org/api/fs.html#fs_stat_time_values)
|
||||
const updatedSrcStat = fs.statSync(src)
|
||||
return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime)
|
||||
}
|
||||
|
||||
function onDir (srcStat, destStat, src, dest, opts) {
|
||||
if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts)
|
||||
return copyDir(src, dest, opts)
|
||||
}
|
||||
|
||||
function mkDirAndCopy (srcMode, src, dest, opts) {
|
||||
fs.mkdirSync(dest)
|
||||
copyDir(src, dest, opts)
|
||||
return setDestMode(dest, srcMode)
|
||||
}
|
||||
|
||||
function copyDir (src, dest, opts) {
|
||||
const dir = fs.opendirSync(src)
|
||||
|
||||
try {
|
||||
let dirent
|
||||
|
||||
while ((dirent = dir.readSync()) !== null) {
|
||||
copyDirItem(dirent.name, src, dest, opts)
|
||||
}
|
||||
} finally {
|
||||
dir.closeSync()
|
||||
}
|
||||
}
|
||||
|
||||
function copyDirItem (item, src, dest, opts) {
|
||||
const srcItem = path.join(src, item)
|
||||
const destItem = path.join(dest, item)
|
||||
if (opts.filter && !opts.filter(srcItem, destItem)) return
|
||||
const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy', opts)
|
||||
return getStats(destStat, srcItem, destItem, opts)
|
||||
}
|
||||
|
||||
function onLink (destStat, src, dest, opts) {
|
||||
let resolvedSrc = fs.readlinkSync(src)
|
||||
if (opts.dereference) {
|
||||
resolvedSrc = path.resolve(process.cwd(), resolvedSrc)
|
||||
}
|
||||
|
||||
if (!destStat) {
|
||||
return fs.symlinkSync(resolvedSrc, dest)
|
||||
} else {
|
||||
let resolvedDest
|
||||
try {
|
||||
resolvedDest = fs.readlinkSync(dest)
|
||||
} catch (err) {
|
||||
// dest exists and is a regular file or directory,
|
||||
// Windows may throw UNKNOWN error. If dest already exists,
|
||||
// fs throws error anyway, so no need to guard against it here.
|
||||
if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest)
|
||||
throw err
|
||||
}
|
||||
if (opts.dereference) {
|
||||
resolvedDest = path.resolve(process.cwd(), resolvedDest)
|
||||
}
|
||||
if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {
|
||||
throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)
|
||||
}
|
||||
|
||||
// prevent copy if src is a subdir of dest since unlinking
|
||||
// dest in this case would result in removing src contents
|
||||
// and therefore a broken symlink would be created.
|
||||
if (stat.isSrcSubdir(resolvedDest, resolvedSrc)) {
|
||||
throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)
|
||||
}
|
||||
return copyLink(resolvedSrc, dest)
|
||||
}
|
||||
}
|
||||
|
||||
function copyLink (resolvedSrc, dest) {
|
||||
fs.unlinkSync(dest)
|
||||
return fs.symlinkSync(resolvedSrc, dest)
|
||||
}
|
||||
|
||||
module.exports = copySync
|
||||
175
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/copy/copy.js
generated
vendored
Normal file
175
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/copy/copy.js
generated
vendored
Normal file
@@ -0,0 +1,175 @@
|
||||
'use strict'
|
||||
|
||||
const fs = require('../fs')
|
||||
const path = require('path')
|
||||
const { mkdirs } = require('../mkdirs')
|
||||
const { pathExists } = require('../path-exists')
|
||||
const { utimesMillis } = require('../util/utimes')
|
||||
const stat = require('../util/stat')
|
||||
const { asyncIteratorConcurrentProcess } = require('../util/async')
|
||||
|
||||
async function copy (src, dest, opts = {}) {
|
||||
if (typeof opts === 'function') {
|
||||
opts = { filter: opts }
|
||||
}
|
||||
|
||||
opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now
|
||||
opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber
|
||||
|
||||
// Warn about using preserveTimestamps on 32-bit node
|
||||
if (opts.preserveTimestamps && process.arch === 'ia32') {
|
||||
process.emitWarning(
|
||||
'Using the preserveTimestamps option in 32-bit node is not recommended;\n\n' +
|
||||
'\tsee https://github.com/jprichardson/node-fs-extra/issues/269',
|
||||
'Warning', 'fs-extra-WARN0001'
|
||||
)
|
||||
}
|
||||
|
||||
const { srcStat, destStat } = await stat.checkPaths(src, dest, 'copy', opts)
|
||||
|
||||
await stat.checkParentPaths(src, srcStat, dest, 'copy')
|
||||
|
||||
const include = await runFilter(src, dest, opts)
|
||||
|
||||
if (!include) return
|
||||
|
||||
// check if the parent of dest exists, and create it if it doesn't exist
|
||||
const destParent = path.dirname(dest)
|
||||
const dirExists = await pathExists(destParent)
|
||||
if (!dirExists) {
|
||||
await mkdirs(destParent)
|
||||
}
|
||||
|
||||
await getStatsAndPerformCopy(destStat, src, dest, opts)
|
||||
}
|
||||
|
||||
async function runFilter (src, dest, opts) {
|
||||
if (!opts.filter) return true
|
||||
return opts.filter(src, dest)
|
||||
}
|
||||
|
||||
async function getStatsAndPerformCopy (destStat, src, dest, opts) {
|
||||
const statFn = opts.dereference ? fs.stat : fs.lstat
|
||||
const srcStat = await statFn(src)
|
||||
|
||||
if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts)
|
||||
|
||||
if (
|
||||
srcStat.isFile() ||
|
||||
srcStat.isCharacterDevice() ||
|
||||
srcStat.isBlockDevice()
|
||||
) return onFile(srcStat, destStat, src, dest, opts)
|
||||
|
||||
if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts)
|
||||
if (srcStat.isSocket()) throw new Error(`Cannot copy a socket file: ${src}`)
|
||||
if (srcStat.isFIFO()) throw new Error(`Cannot copy a FIFO pipe: ${src}`)
|
||||
throw new Error(`Unknown file: ${src}`)
|
||||
}
|
||||
|
||||
async function onFile (srcStat, destStat, src, dest, opts) {
|
||||
if (!destStat) return copyFile(srcStat, src, dest, opts)
|
||||
|
||||
if (opts.overwrite) {
|
||||
await fs.unlink(dest)
|
||||
return copyFile(srcStat, src, dest, opts)
|
||||
}
|
||||
if (opts.errorOnExist) {
|
||||
throw new Error(`'${dest}' already exists`)
|
||||
}
|
||||
}
|
||||
|
||||
async function copyFile (srcStat, src, dest, opts) {
|
||||
await fs.copyFile(src, dest)
|
||||
if (opts.preserveTimestamps) {
|
||||
// Make sure the file is writable before setting the timestamp
|
||||
// otherwise open fails with EPERM when invoked with 'r+'
|
||||
// (through utimes call)
|
||||
if (fileIsNotWritable(srcStat.mode)) {
|
||||
await makeFileWritable(dest, srcStat.mode)
|
||||
}
|
||||
|
||||
// Set timestamps and mode correspondingly
|
||||
|
||||
// Note that The initial srcStat.atime cannot be trusted
|
||||
// because it is modified by the read(2) system call
|
||||
// (See https://nodejs.org/api/fs.html#fs_stat_time_values)
|
||||
const updatedSrcStat = await fs.stat(src)
|
||||
await utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime)
|
||||
}
|
||||
|
||||
return fs.chmod(dest, srcStat.mode)
|
||||
}
|
||||
|
||||
function fileIsNotWritable (srcMode) {
|
||||
return (srcMode & 0o200) === 0
|
||||
}
|
||||
|
||||
function makeFileWritable (dest, srcMode) {
|
||||
return fs.chmod(dest, srcMode | 0o200)
|
||||
}
|
||||
|
||||
async function onDir (srcStat, destStat, src, dest, opts) {
|
||||
// the dest directory might not exist, create it
|
||||
if (!destStat) {
|
||||
await fs.mkdir(dest)
|
||||
}
|
||||
|
||||
// iterate through the files in the current directory to copy everything
|
||||
await asyncIteratorConcurrentProcess(await fs.opendir(src), async (item) => {
|
||||
const srcItem = path.join(src, item.name)
|
||||
const destItem = path.join(dest, item.name)
|
||||
|
||||
const include = await runFilter(srcItem, destItem, opts)
|
||||
// only copy the item if it matches the filter function
|
||||
if (include) {
|
||||
const { destStat } = await stat.checkPaths(srcItem, destItem, 'copy', opts)
|
||||
// If the item is a copyable file, `getStatsAndPerformCopy` will copy it
|
||||
// If the item is a directory, `getStatsAndPerformCopy` will call `onDir` recursively
|
||||
await getStatsAndPerformCopy(destStat, srcItem, destItem, opts)
|
||||
}
|
||||
})
|
||||
|
||||
if (!destStat) {
|
||||
await fs.chmod(dest, srcStat.mode)
|
||||
}
|
||||
}
|
||||
|
||||
async function onLink (destStat, src, dest, opts) {
|
||||
let resolvedSrc = await fs.readlink(src)
|
||||
if (opts.dereference) {
|
||||
resolvedSrc = path.resolve(process.cwd(), resolvedSrc)
|
||||
}
|
||||
if (!destStat) {
|
||||
return fs.symlink(resolvedSrc, dest)
|
||||
}
|
||||
|
||||
let resolvedDest = null
|
||||
try {
|
||||
resolvedDest = await fs.readlink(dest)
|
||||
} catch (e) {
|
||||
// dest exists and is a regular file or directory,
|
||||
// Windows may throw UNKNOWN error. If dest already exists,
|
||||
// fs throws error anyway, so no need to guard against it here.
|
||||
if (e.code === 'EINVAL' || e.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest)
|
||||
throw e
|
||||
}
|
||||
if (opts.dereference) {
|
||||
resolvedDest = path.resolve(process.cwd(), resolvedDest)
|
||||
}
|
||||
if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {
|
||||
throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)
|
||||
}
|
||||
|
||||
// do not copy if src is a subdir of dest since unlinking
|
||||
// dest in this case would result in removing src contents
|
||||
// and therefore a broken symlink would be created.
|
||||
if (stat.isSrcSubdir(resolvedDest, resolvedSrc)) {
|
||||
throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)
|
||||
}
|
||||
|
||||
// copy the link
|
||||
await fs.unlink(dest)
|
||||
return fs.symlink(resolvedSrc, dest)
|
||||
}
|
||||
|
||||
module.exports = copy
|
||||
7
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/copy/index.js
generated
vendored
Normal file
7
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/copy/index.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
'use strict'
|
||||
|
||||
const u = require('universalify').fromPromise
|
||||
module.exports = {
|
||||
copy: u(require('./copy')),
|
||||
copySync: require('./copy-sync')
|
||||
}
|
||||
39
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/empty/index.js
generated
vendored
Normal file
39
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/empty/index.js
generated
vendored
Normal file
@@ -0,0 +1,39 @@
|
||||
'use strict'
|
||||
|
||||
const u = require('universalify').fromPromise
|
||||
const fs = require('../fs')
|
||||
const path = require('path')
|
||||
const mkdir = require('../mkdirs')
|
||||
const remove = require('../remove')
|
||||
|
||||
const emptyDir = u(async function emptyDir (dir) {
|
||||
let items
|
||||
try {
|
||||
items = await fs.readdir(dir)
|
||||
} catch {
|
||||
return mkdir.mkdirs(dir)
|
||||
}
|
||||
|
||||
return Promise.all(items.map(item => remove.remove(path.join(dir, item))))
|
||||
})
|
||||
|
||||
function emptyDirSync (dir) {
|
||||
let items
|
||||
try {
|
||||
items = fs.readdirSync(dir)
|
||||
} catch {
|
||||
return mkdir.mkdirsSync(dir)
|
||||
}
|
||||
|
||||
items.forEach(item => {
|
||||
item = path.join(dir, item)
|
||||
remove.removeSync(item)
|
||||
})
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
emptyDirSync,
|
||||
emptydirSync: emptyDirSync,
|
||||
emptyDir,
|
||||
emptydir: emptyDir
|
||||
}
|
||||
66
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/ensure/file.js
generated
vendored
Normal file
66
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/ensure/file.js
generated
vendored
Normal file
@@ -0,0 +1,66 @@
|
||||
'use strict'
|
||||
|
||||
const u = require('universalify').fromPromise
|
||||
const path = require('path')
|
||||
const fs = require('../fs')
|
||||
const mkdir = require('../mkdirs')
|
||||
|
||||
async function createFile (file) {
|
||||
let stats
|
||||
try {
|
||||
stats = await fs.stat(file)
|
||||
} catch { }
|
||||
if (stats && stats.isFile()) return
|
||||
|
||||
const dir = path.dirname(file)
|
||||
|
||||
let dirStats = null
|
||||
try {
|
||||
dirStats = await fs.stat(dir)
|
||||
} catch (err) {
|
||||
// if the directory doesn't exist, make it
|
||||
if (err.code === 'ENOENT') {
|
||||
await mkdir.mkdirs(dir)
|
||||
await fs.writeFile(file, '')
|
||||
return
|
||||
} else {
|
||||
throw err
|
||||
}
|
||||
}
|
||||
|
||||
if (dirStats.isDirectory()) {
|
||||
await fs.writeFile(file, '')
|
||||
} else {
|
||||
// parent is not a directory
|
||||
// This is just to cause an internal ENOTDIR error to be thrown
|
||||
await fs.readdir(dir)
|
||||
}
|
||||
}
|
||||
|
||||
function createFileSync (file) {
|
||||
let stats
|
||||
try {
|
||||
stats = fs.statSync(file)
|
||||
} catch { }
|
||||
if (stats && stats.isFile()) return
|
||||
|
||||
const dir = path.dirname(file)
|
||||
try {
|
||||
if (!fs.statSync(dir).isDirectory()) {
|
||||
// parent is not a directory
|
||||
// This is just to cause an internal ENOTDIR error to be thrown
|
||||
fs.readdirSync(dir)
|
||||
}
|
||||
} catch (err) {
|
||||
// If the stat call above failed because the directory doesn't exist, create it
|
||||
if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir)
|
||||
else throw err
|
||||
}
|
||||
|
||||
fs.writeFileSync(file, '')
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
createFile: u(createFile),
|
||||
createFileSync
|
||||
}
|
||||
23
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/ensure/index.js
generated
vendored
Normal file
23
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/ensure/index.js
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
'use strict'
|
||||
|
||||
const { createFile, createFileSync } = require('./file')
|
||||
const { createLink, createLinkSync } = require('./link')
|
||||
const { createSymlink, createSymlinkSync } = require('./symlink')
|
||||
|
||||
module.exports = {
|
||||
// file
|
||||
createFile,
|
||||
createFileSync,
|
||||
ensureFile: createFile,
|
||||
ensureFileSync: createFileSync,
|
||||
// link
|
||||
createLink,
|
||||
createLinkSync,
|
||||
ensureLink: createLink,
|
||||
ensureLinkSync: createLinkSync,
|
||||
// symlink
|
||||
createSymlink,
|
||||
createSymlinkSync,
|
||||
ensureSymlink: createSymlink,
|
||||
ensureSymlinkSync: createSymlinkSync
|
||||
}
|
||||
64
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/ensure/link.js
generated
vendored
Normal file
64
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/ensure/link.js
generated
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
'use strict'
|
||||
|
||||
const u = require('universalify').fromPromise
|
||||
const path = require('path')
|
||||
const fs = require('../fs')
|
||||
const mkdir = require('../mkdirs')
|
||||
const { pathExists } = require('../path-exists')
|
||||
const { areIdentical } = require('../util/stat')
|
||||
|
||||
async function createLink (srcpath, dstpath) {
|
||||
let dstStat
|
||||
try {
|
||||
dstStat = await fs.lstat(dstpath)
|
||||
} catch {
|
||||
// ignore error
|
||||
}
|
||||
|
||||
let srcStat
|
||||
try {
|
||||
srcStat = await fs.lstat(srcpath)
|
||||
} catch (err) {
|
||||
err.message = err.message.replace('lstat', 'ensureLink')
|
||||
throw err
|
||||
}
|
||||
|
||||
if (dstStat && areIdentical(srcStat, dstStat)) return
|
||||
|
||||
const dir = path.dirname(dstpath)
|
||||
|
||||
const dirExists = await pathExists(dir)
|
||||
|
||||
if (!dirExists) {
|
||||
await mkdir.mkdirs(dir)
|
||||
}
|
||||
|
||||
await fs.link(srcpath, dstpath)
|
||||
}
|
||||
|
||||
function createLinkSync (srcpath, dstpath) {
|
||||
let dstStat
|
||||
try {
|
||||
dstStat = fs.lstatSync(dstpath)
|
||||
} catch {}
|
||||
|
||||
try {
|
||||
const srcStat = fs.lstatSync(srcpath)
|
||||
if (dstStat && areIdentical(srcStat, dstStat)) return
|
||||
} catch (err) {
|
||||
err.message = err.message.replace('lstat', 'ensureLink')
|
||||
throw err
|
||||
}
|
||||
|
||||
const dir = path.dirname(dstpath)
|
||||
const dirExists = fs.existsSync(dir)
|
||||
if (dirExists) return fs.linkSync(srcpath, dstpath)
|
||||
mkdir.mkdirsSync(dir)
|
||||
|
||||
return fs.linkSync(srcpath, dstpath)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
createLink: u(createLink),
|
||||
createLinkSync
|
||||
}
|
||||
101
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/ensure/symlink-paths.js
generated
vendored
Normal file
101
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/ensure/symlink-paths.js
generated
vendored
Normal file
@@ -0,0 +1,101 @@
|
||||
'use strict'
|
||||
|
||||
const path = require('path')
|
||||
const fs = require('../fs')
|
||||
const { pathExists } = require('../path-exists')
|
||||
|
||||
const u = require('universalify').fromPromise
|
||||
|
||||
/**
|
||||
* Function that returns two types of paths, one relative to symlink, and one
|
||||
* relative to the current working directory. Checks if path is absolute or
|
||||
* relative. If the path is relative, this function checks if the path is
|
||||
* relative to symlink or relative to current working directory. This is an
|
||||
* initiative to find a smarter `srcpath` to supply when building symlinks.
|
||||
* This allows you to determine which path to use out of one of three possible
|
||||
* types of source paths. The first is an absolute path. This is detected by
|
||||
* `path.isAbsolute()`. When an absolute path is provided, it is checked to
|
||||
* see if it exists. If it does it's used, if not an error is returned
|
||||
* (callback)/ thrown (sync). The other two options for `srcpath` are a
|
||||
* relative url. By default Node's `fs.symlink` works by creating a symlink
|
||||
* using `dstpath` and expects the `srcpath` to be relative to the newly
|
||||
* created symlink. If you provide a `srcpath` that does not exist on the file
|
||||
* system it results in a broken symlink. To minimize this, the function
|
||||
* checks to see if the 'relative to symlink' source file exists, and if it
|
||||
* does it will use it. If it does not, it checks if there's a file that
|
||||
* exists that is relative to the current working directory, if does its used.
|
||||
* This preserves the expectations of the original fs.symlink spec and adds
|
||||
* the ability to pass in `relative to current working direcotry` paths.
|
||||
*/
|
||||
|
||||
async function symlinkPaths (srcpath, dstpath) {
|
||||
if (path.isAbsolute(srcpath)) {
|
||||
try {
|
||||
await fs.lstat(srcpath)
|
||||
} catch (err) {
|
||||
err.message = err.message.replace('lstat', 'ensureSymlink')
|
||||
throw err
|
||||
}
|
||||
|
||||
return {
|
||||
toCwd: srcpath,
|
||||
toDst: srcpath
|
||||
}
|
||||
}
|
||||
|
||||
const dstdir = path.dirname(dstpath)
|
||||
const relativeToDst = path.join(dstdir, srcpath)
|
||||
|
||||
const exists = await pathExists(relativeToDst)
|
||||
if (exists) {
|
||||
return {
|
||||
toCwd: relativeToDst,
|
||||
toDst: srcpath
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
await fs.lstat(srcpath)
|
||||
} catch (err) {
|
||||
err.message = err.message.replace('lstat', 'ensureSymlink')
|
||||
throw err
|
||||
}
|
||||
|
||||
return {
|
||||
toCwd: srcpath,
|
||||
toDst: path.relative(dstdir, srcpath)
|
||||
}
|
||||
}
|
||||
|
||||
function symlinkPathsSync (srcpath, dstpath) {
|
||||
if (path.isAbsolute(srcpath)) {
|
||||
const exists = fs.existsSync(srcpath)
|
||||
if (!exists) throw new Error('absolute srcpath does not exist')
|
||||
return {
|
||||
toCwd: srcpath,
|
||||
toDst: srcpath
|
||||
}
|
||||
}
|
||||
|
||||
const dstdir = path.dirname(dstpath)
|
||||
const relativeToDst = path.join(dstdir, srcpath)
|
||||
const exists = fs.existsSync(relativeToDst)
|
||||
if (exists) {
|
||||
return {
|
||||
toCwd: relativeToDst,
|
||||
toDst: srcpath
|
||||
}
|
||||
}
|
||||
|
||||
const srcExists = fs.existsSync(srcpath)
|
||||
if (!srcExists) throw new Error('relative srcpath does not exist')
|
||||
return {
|
||||
toCwd: srcpath,
|
||||
toDst: path.relative(dstdir, srcpath)
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
symlinkPaths: u(symlinkPaths),
|
||||
symlinkPathsSync
|
||||
}
|
||||
34
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/ensure/symlink-type.js
generated
vendored
Normal file
34
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/ensure/symlink-type.js
generated
vendored
Normal file
@@ -0,0 +1,34 @@
|
||||
'use strict'
|
||||
|
||||
const fs = require('../fs')
|
||||
const u = require('universalify').fromPromise
|
||||
|
||||
async function symlinkType (srcpath, type) {
|
||||
if (type) return type
|
||||
|
||||
let stats
|
||||
try {
|
||||
stats = await fs.lstat(srcpath)
|
||||
} catch {
|
||||
return 'file'
|
||||
}
|
||||
|
||||
return (stats && stats.isDirectory()) ? 'dir' : 'file'
|
||||
}
|
||||
|
||||
function symlinkTypeSync (srcpath, type) {
|
||||
if (type) return type
|
||||
|
||||
let stats
|
||||
try {
|
||||
stats = fs.lstatSync(srcpath)
|
||||
} catch {
|
||||
return 'file'
|
||||
}
|
||||
return (stats && stats.isDirectory()) ? 'dir' : 'file'
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
symlinkType: u(symlinkType),
|
||||
symlinkTypeSync
|
||||
}
|
||||
67
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/ensure/symlink.js
generated
vendored
Normal file
67
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/ensure/symlink.js
generated
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
'use strict'
|
||||
|
||||
const u = require('universalify').fromPromise
|
||||
const path = require('path')
|
||||
const fs = require('../fs')
|
||||
|
||||
const { mkdirs, mkdirsSync } = require('../mkdirs')
|
||||
|
||||
const { symlinkPaths, symlinkPathsSync } = require('./symlink-paths')
|
||||
const { symlinkType, symlinkTypeSync } = require('./symlink-type')
|
||||
|
||||
const { pathExists } = require('../path-exists')
|
||||
|
||||
const { areIdentical } = require('../util/stat')
|
||||
|
||||
async function createSymlink (srcpath, dstpath, type) {
|
||||
let stats
|
||||
try {
|
||||
stats = await fs.lstat(dstpath)
|
||||
} catch { }
|
||||
|
||||
if (stats && stats.isSymbolicLink()) {
|
||||
const [srcStat, dstStat] = await Promise.all([
|
||||
fs.stat(srcpath),
|
||||
fs.stat(dstpath)
|
||||
])
|
||||
|
||||
if (areIdentical(srcStat, dstStat)) return
|
||||
}
|
||||
|
||||
const relative = await symlinkPaths(srcpath, dstpath)
|
||||
srcpath = relative.toDst
|
||||
const toType = await symlinkType(relative.toCwd, type)
|
||||
const dir = path.dirname(dstpath)
|
||||
|
||||
if (!(await pathExists(dir))) {
|
||||
await mkdirs(dir)
|
||||
}
|
||||
|
||||
return fs.symlink(srcpath, dstpath, toType)
|
||||
}
|
||||
|
||||
function createSymlinkSync (srcpath, dstpath, type) {
|
||||
let stats
|
||||
try {
|
||||
stats = fs.lstatSync(dstpath)
|
||||
} catch { }
|
||||
if (stats && stats.isSymbolicLink()) {
|
||||
const srcStat = fs.statSync(srcpath)
|
||||
const dstStat = fs.statSync(dstpath)
|
||||
if (areIdentical(srcStat, dstStat)) return
|
||||
}
|
||||
|
||||
const relative = symlinkPathsSync(srcpath, dstpath)
|
||||
srcpath = relative.toDst
|
||||
type = symlinkTypeSync(relative.toCwd, type)
|
||||
const dir = path.dirname(dstpath)
|
||||
const exists = fs.existsSync(dir)
|
||||
if (exists) return fs.symlinkSync(srcpath, dstpath, type)
|
||||
mkdirsSync(dir)
|
||||
return fs.symlinkSync(srcpath, dstpath, type)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
createSymlink: u(createSymlink),
|
||||
createSymlinkSync
|
||||
}
|
||||
68
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/esm.mjs
generated
vendored
Normal file
68
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/esm.mjs
generated
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
import _copy from './copy/index.js'
|
||||
import _empty from './empty/index.js'
|
||||
import _ensure from './ensure/index.js'
|
||||
import _json from './json/index.js'
|
||||
import _mkdirs from './mkdirs/index.js'
|
||||
import _move from './move/index.js'
|
||||
import _outputFile from './output-file/index.js'
|
||||
import _pathExists from './path-exists/index.js'
|
||||
import _remove from './remove/index.js'
|
||||
|
||||
// NOTE: Only exports fs-extra's functions; fs functions must be imported from "node:fs" or "node:fs/promises"
|
||||
|
||||
export const copy = _copy.copy
|
||||
export const copySync = _copy.copySync
|
||||
export const emptyDirSync = _empty.emptyDirSync
|
||||
export const emptydirSync = _empty.emptydirSync
|
||||
export const emptyDir = _empty.emptyDir
|
||||
export const emptydir = _empty.emptydir
|
||||
export const createFile = _ensure.createFile
|
||||
export const createFileSync = _ensure.createFileSync
|
||||
export const ensureFile = _ensure.ensureFile
|
||||
export const ensureFileSync = _ensure.ensureFileSync
|
||||
export const createLink = _ensure.createLink
|
||||
export const createLinkSync = _ensure.createLinkSync
|
||||
export const ensureLink = _ensure.ensureLink
|
||||
export const ensureLinkSync = _ensure.ensureLinkSync
|
||||
export const createSymlink = _ensure.createSymlink
|
||||
export const createSymlinkSync = _ensure.createSymlinkSync
|
||||
export const ensureSymlink = _ensure.ensureSymlink
|
||||
export const ensureSymlinkSync = _ensure.ensureSymlinkSync
|
||||
export const readJson = _json.readJson
|
||||
export const readJSON = _json.readJSON
|
||||
export const readJsonSync = _json.readJsonSync
|
||||
export const readJSONSync = _json.readJSONSync
|
||||
export const writeJson = _json.writeJson
|
||||
export const writeJSON = _json.writeJSON
|
||||
export const writeJsonSync = _json.writeJsonSync
|
||||
export const writeJSONSync = _json.writeJSONSync
|
||||
export const outputJson = _json.outputJson
|
||||
export const outputJSON = _json.outputJSON
|
||||
export const outputJsonSync = _json.outputJsonSync
|
||||
export const outputJSONSync = _json.outputJSONSync
|
||||
export const mkdirs = _mkdirs.mkdirs
|
||||
export const mkdirsSync = _mkdirs.mkdirsSync
|
||||
export const mkdirp = _mkdirs.mkdirp
|
||||
export const mkdirpSync = _mkdirs.mkdirpSync
|
||||
export const ensureDir = _mkdirs.ensureDir
|
||||
export const ensureDirSync = _mkdirs.ensureDirSync
|
||||
export const move = _move.move
|
||||
export const moveSync = _move.moveSync
|
||||
export const outputFile = _outputFile.outputFile
|
||||
export const outputFileSync = _outputFile.outputFileSync
|
||||
export const pathExists = _pathExists.pathExists
|
||||
export const pathExistsSync = _pathExists.pathExistsSync
|
||||
export const remove = _remove.remove
|
||||
export const removeSync = _remove.removeSync
|
||||
|
||||
export default {
|
||||
..._copy,
|
||||
..._empty,
|
||||
..._ensure,
|
||||
..._json,
|
||||
..._mkdirs,
|
||||
..._move,
|
||||
..._outputFile,
|
||||
..._pathExists,
|
||||
..._remove
|
||||
}
|
||||
146
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/fs/index.js
generated
vendored
Normal file
146
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/fs/index.js
generated
vendored
Normal file
@@ -0,0 +1,146 @@
|
||||
'use strict'
|
||||
// This is adapted from https://github.com/normalize/mz
|
||||
// Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors
|
||||
const u = require('universalify').fromCallback
|
||||
const fs = require('graceful-fs')
|
||||
|
||||
const api = [
|
||||
'access',
|
||||
'appendFile',
|
||||
'chmod',
|
||||
'chown',
|
||||
'close',
|
||||
'copyFile',
|
||||
'cp',
|
||||
'fchmod',
|
||||
'fchown',
|
||||
'fdatasync',
|
||||
'fstat',
|
||||
'fsync',
|
||||
'ftruncate',
|
||||
'futimes',
|
||||
'glob',
|
||||
'lchmod',
|
||||
'lchown',
|
||||
'lutimes',
|
||||
'link',
|
||||
'lstat',
|
||||
'mkdir',
|
||||
'mkdtemp',
|
||||
'open',
|
||||
'opendir',
|
||||
'readdir',
|
||||
'readFile',
|
||||
'readlink',
|
||||
'realpath',
|
||||
'rename',
|
||||
'rm',
|
||||
'rmdir',
|
||||
'stat',
|
||||
'statfs',
|
||||
'symlink',
|
||||
'truncate',
|
||||
'unlink',
|
||||
'utimes',
|
||||
'writeFile'
|
||||
].filter(key => {
|
||||
// Some commands are not available on some systems. Ex:
|
||||
// fs.cp was added in Node.js v16.7.0
|
||||
// fs.statfs was added in Node v19.6.0, v18.15.0
|
||||
// fs.glob was added in Node.js v22.0.0
|
||||
// fs.lchown is not available on at least some Linux
|
||||
return typeof fs[key] === 'function'
|
||||
})
|
||||
|
||||
// Export cloned fs:
|
||||
Object.assign(exports, fs)
|
||||
|
||||
// Universalify async methods:
|
||||
api.forEach(method => {
|
||||
exports[method] = u(fs[method])
|
||||
})
|
||||
|
||||
// We differ from mz/fs in that we still ship the old, broken, fs.exists()
|
||||
// since we are a drop-in replacement for the native module
|
||||
exports.exists = function (filename, callback) {
|
||||
if (typeof callback === 'function') {
|
||||
return fs.exists(filename, callback)
|
||||
}
|
||||
return new Promise(resolve => {
|
||||
return fs.exists(filename, resolve)
|
||||
})
|
||||
}
|
||||
|
||||
// fs.read(), fs.write(), fs.readv(), & fs.writev() need special treatment due to multiple callback args
|
||||
|
||||
exports.read = function (fd, buffer, offset, length, position, callback) {
|
||||
if (typeof callback === 'function') {
|
||||
return fs.read(fd, buffer, offset, length, position, callback)
|
||||
}
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.read(fd, buffer, offset, length, position, (err, bytesRead, buffer) => {
|
||||
if (err) return reject(err)
|
||||
resolve({ bytesRead, buffer })
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
// Function signature can be
|
||||
// fs.write(fd, buffer[, offset[, length[, position]]], callback)
|
||||
// OR
|
||||
// fs.write(fd, string[, position[, encoding]], callback)
|
||||
// We need to handle both cases, so we use ...args
|
||||
exports.write = function (fd, buffer, ...args) {
|
||||
if (typeof args[args.length - 1] === 'function') {
|
||||
return fs.write(fd, buffer, ...args)
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.write(fd, buffer, ...args, (err, bytesWritten, buffer) => {
|
||||
if (err) return reject(err)
|
||||
resolve({ bytesWritten, buffer })
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
// Function signature is
|
||||
// s.readv(fd, buffers[, position], callback)
|
||||
// We need to handle the optional arg, so we use ...args
|
||||
exports.readv = function (fd, buffers, ...args) {
|
||||
if (typeof args[args.length - 1] === 'function') {
|
||||
return fs.readv(fd, buffers, ...args)
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.readv(fd, buffers, ...args, (err, bytesRead, buffers) => {
|
||||
if (err) return reject(err)
|
||||
resolve({ bytesRead, buffers })
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
// Function signature is
|
||||
// s.writev(fd, buffers[, position], callback)
|
||||
// We need to handle the optional arg, so we use ...args
|
||||
exports.writev = function (fd, buffers, ...args) {
|
||||
if (typeof args[args.length - 1] === 'function') {
|
||||
return fs.writev(fd, buffers, ...args)
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.writev(fd, buffers, ...args, (err, bytesWritten, buffers) => {
|
||||
if (err) return reject(err)
|
||||
resolve({ bytesWritten, buffers })
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
// fs.realpath.native sometimes not available if fs is monkey-patched
|
||||
if (typeof fs.realpath.native === 'function') {
|
||||
exports.realpath.native = u(fs.realpath.native)
|
||||
} else {
|
||||
process.emitWarning(
|
||||
'fs.realpath.native is not a function. Is fs being monkey-patched?',
|
||||
'Warning', 'fs-extra-WARN0003'
|
||||
)
|
||||
}
|
||||
16
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/index.js
generated
vendored
Normal file
16
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/index.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
'use strict'
|
||||
|
||||
module.exports = {
|
||||
// Export promiseified graceful-fs:
|
||||
...require('./fs'),
|
||||
// Export extra methods:
|
||||
...require('./copy'),
|
||||
...require('./empty'),
|
||||
...require('./ensure'),
|
||||
...require('./json'),
|
||||
...require('./mkdirs'),
|
||||
...require('./move'),
|
||||
...require('./output-file'),
|
||||
...require('./path-exists'),
|
||||
...require('./remove')
|
||||
}
|
||||
16
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/json/index.js
generated
vendored
Normal file
16
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/json/index.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
'use strict'
|
||||
|
||||
const u = require('universalify').fromPromise
|
||||
const jsonFile = require('./jsonfile')
|
||||
|
||||
jsonFile.outputJson = u(require('./output-json'))
|
||||
jsonFile.outputJsonSync = require('./output-json-sync')
|
||||
// aliases
|
||||
jsonFile.outputJSON = jsonFile.outputJson
|
||||
jsonFile.outputJSONSync = jsonFile.outputJsonSync
|
||||
jsonFile.writeJSON = jsonFile.writeJson
|
||||
jsonFile.writeJSONSync = jsonFile.writeJsonSync
|
||||
jsonFile.readJSON = jsonFile.readJson
|
||||
jsonFile.readJSONSync = jsonFile.readJsonSync
|
||||
|
||||
module.exports = jsonFile
|
||||
11
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/json/jsonfile.js
generated
vendored
Normal file
11
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/json/jsonfile.js
generated
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
'use strict'
|
||||
|
||||
const jsonFile = require('jsonfile')
|
||||
|
||||
module.exports = {
|
||||
// jsonfile exports
|
||||
readJson: jsonFile.readFile,
|
||||
readJsonSync: jsonFile.readFileSync,
|
||||
writeJson: jsonFile.writeFile,
|
||||
writeJsonSync: jsonFile.writeFileSync
|
||||
}
|
||||
12
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/json/output-json-sync.js
generated
vendored
Normal file
12
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/json/output-json-sync.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
'use strict'
|
||||
|
||||
const { stringify } = require('jsonfile/utils')
|
||||
const { outputFileSync } = require('../output-file')
|
||||
|
||||
function outputJsonSync (file, data, options) {
|
||||
const str = stringify(data, options)
|
||||
|
||||
outputFileSync(file, str, options)
|
||||
}
|
||||
|
||||
module.exports = outputJsonSync
|
||||
12
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/json/output-json.js
generated
vendored
Normal file
12
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/json/output-json.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
'use strict'
|
||||
|
||||
const { stringify } = require('jsonfile/utils')
|
||||
const { outputFile } = require('../output-file')
|
||||
|
||||
async function outputJson (file, data, options = {}) {
|
||||
const str = stringify(data, options)
|
||||
|
||||
await outputFile(file, str, options)
|
||||
}
|
||||
|
||||
module.exports = outputJson
|
||||
14
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/mkdirs/index.js
generated
vendored
Normal file
14
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/mkdirs/index.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
'use strict'
|
||||
const u = require('universalify').fromPromise
|
||||
const { makeDir: _makeDir, makeDirSync } = require('./make-dir')
|
||||
const makeDir = u(_makeDir)
|
||||
|
||||
module.exports = {
|
||||
mkdirs: makeDir,
|
||||
mkdirsSync: makeDirSync,
|
||||
// alias
|
||||
mkdirp: makeDir,
|
||||
mkdirpSync: makeDirSync,
|
||||
ensureDir: makeDir,
|
||||
ensureDirSync: makeDirSync
|
||||
}
|
||||
27
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/mkdirs/make-dir.js
generated
vendored
Normal file
27
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/mkdirs/make-dir.js
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
'use strict'
|
||||
const fs = require('../fs')
|
||||
const { checkPath } = require('./utils')
|
||||
|
||||
const getMode = options => {
|
||||
const defaults = { mode: 0o777 }
|
||||
if (typeof options === 'number') return options
|
||||
return ({ ...defaults, ...options }).mode
|
||||
}
|
||||
|
||||
module.exports.makeDir = async (dir, options) => {
|
||||
checkPath(dir)
|
||||
|
||||
return fs.mkdir(dir, {
|
||||
mode: getMode(options),
|
||||
recursive: true
|
||||
})
|
||||
}
|
||||
|
||||
module.exports.makeDirSync = (dir, options) => {
|
||||
checkPath(dir)
|
||||
|
||||
return fs.mkdirSync(dir, {
|
||||
mode: getMode(options),
|
||||
recursive: true
|
||||
})
|
||||
}
|
||||
21
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/mkdirs/utils.js
generated
vendored
Normal file
21
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/mkdirs/utils.js
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
// Adapted from https://github.com/sindresorhus/make-dir
|
||||
// Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
'use strict'
|
||||
const path = require('path')
|
||||
|
||||
// https://github.com/nodejs/node/issues/8987
|
||||
// https://github.com/libuv/libuv/pull/1088
|
||||
module.exports.checkPath = function checkPath (pth) {
|
||||
if (process.platform === 'win32') {
|
||||
const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, ''))
|
||||
|
||||
if (pathHasInvalidWinCharacters) {
|
||||
const error = new Error(`Path contains invalid characters: ${pth}`)
|
||||
error.code = 'EINVAL'
|
||||
throw error
|
||||
}
|
||||
}
|
||||
}
|
||||
7
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/move/index.js
generated
vendored
Normal file
7
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/move/index.js
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
'use strict'
|
||||
|
||||
const u = require('universalify').fromPromise
|
||||
module.exports = {
|
||||
move: u(require('./move')),
|
||||
moveSync: require('./move-sync')
|
||||
}
|
||||
55
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/move/move-sync.js
generated
vendored
Normal file
55
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/move/move-sync.js
generated
vendored
Normal file
@@ -0,0 +1,55 @@
|
||||
'use strict'
|
||||
|
||||
const fs = require('graceful-fs')
|
||||
const path = require('path')
|
||||
const copySync = require('../copy').copySync
|
||||
const removeSync = require('../remove').removeSync
|
||||
const mkdirpSync = require('../mkdirs').mkdirpSync
|
||||
const stat = require('../util/stat')
|
||||
|
||||
function moveSync (src, dest, opts) {
|
||||
opts = opts || {}
|
||||
const overwrite = opts.overwrite || opts.clobber || false
|
||||
|
||||
const { srcStat, isChangingCase = false } = stat.checkPathsSync(src, dest, 'move', opts)
|
||||
stat.checkParentPathsSync(src, srcStat, dest, 'move')
|
||||
if (!isParentRoot(dest)) mkdirpSync(path.dirname(dest))
|
||||
return doRename(src, dest, overwrite, isChangingCase)
|
||||
}
|
||||
|
||||
function isParentRoot (dest) {
|
||||
const parent = path.dirname(dest)
|
||||
const parsedPath = path.parse(parent)
|
||||
return parsedPath.root === parent
|
||||
}
|
||||
|
||||
function doRename (src, dest, overwrite, isChangingCase) {
|
||||
if (isChangingCase) return rename(src, dest, overwrite)
|
||||
if (overwrite) {
|
||||
removeSync(dest)
|
||||
return rename(src, dest, overwrite)
|
||||
}
|
||||
if (fs.existsSync(dest)) throw new Error('dest already exists.')
|
||||
return rename(src, dest, overwrite)
|
||||
}
|
||||
|
||||
function rename (src, dest, overwrite) {
|
||||
try {
|
||||
fs.renameSync(src, dest)
|
||||
} catch (err) {
|
||||
if (err.code !== 'EXDEV') throw err
|
||||
return moveAcrossDevice(src, dest, overwrite)
|
||||
}
|
||||
}
|
||||
|
||||
function moveAcrossDevice (src, dest, overwrite) {
|
||||
const opts = {
|
||||
overwrite,
|
||||
errorOnExist: true,
|
||||
preserveTimestamps: true
|
||||
}
|
||||
copySync(src, dest, opts)
|
||||
return removeSync(src)
|
||||
}
|
||||
|
||||
module.exports = moveSync
|
||||
59
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/move/move.js
generated
vendored
Normal file
59
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/move/move.js
generated
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
'use strict'
|
||||
|
||||
const fs = require('../fs')
|
||||
const path = require('path')
|
||||
const { copy } = require('../copy')
|
||||
const { remove } = require('../remove')
|
||||
const { mkdirp } = require('../mkdirs')
|
||||
const { pathExists } = require('../path-exists')
|
||||
const stat = require('../util/stat')
|
||||
|
||||
async function move (src, dest, opts = {}) {
|
||||
const overwrite = opts.overwrite || opts.clobber || false
|
||||
|
||||
const { srcStat, isChangingCase = false } = await stat.checkPaths(src, dest, 'move', opts)
|
||||
|
||||
await stat.checkParentPaths(src, srcStat, dest, 'move')
|
||||
|
||||
// If the parent of dest is not root, make sure it exists before proceeding
|
||||
const destParent = path.dirname(dest)
|
||||
const parsedParentPath = path.parse(destParent)
|
||||
if (parsedParentPath.root !== destParent) {
|
||||
await mkdirp(destParent)
|
||||
}
|
||||
|
||||
return doRename(src, dest, overwrite, isChangingCase)
|
||||
}
|
||||
|
||||
async function doRename (src, dest, overwrite, isChangingCase) {
|
||||
if (!isChangingCase) {
|
||||
if (overwrite) {
|
||||
await remove(dest)
|
||||
} else if (await pathExists(dest)) {
|
||||
throw new Error('dest already exists.')
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
// Try w/ rename first, and try copy + remove if EXDEV
|
||||
await fs.rename(src, dest)
|
||||
} catch (err) {
|
||||
if (err.code !== 'EXDEV') {
|
||||
throw err
|
||||
}
|
||||
await moveAcrossDevice(src, dest, overwrite)
|
||||
}
|
||||
}
|
||||
|
||||
async function moveAcrossDevice (src, dest, overwrite) {
|
||||
const opts = {
|
||||
overwrite,
|
||||
errorOnExist: true,
|
||||
preserveTimestamps: true
|
||||
}
|
||||
|
||||
await copy(src, dest, opts)
|
||||
return remove(src)
|
||||
}
|
||||
|
||||
module.exports = move
|
||||
31
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/output-file/index.js
generated
vendored
Normal file
31
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/output-file/index.js
generated
vendored
Normal file
@@ -0,0 +1,31 @@
|
||||
'use strict'
|
||||
|
||||
const u = require('universalify').fromPromise
|
||||
const fs = require('../fs')
|
||||
const path = require('path')
|
||||
const mkdir = require('../mkdirs')
|
||||
const pathExists = require('../path-exists').pathExists
|
||||
|
||||
async function outputFile (file, data, encoding = 'utf-8') {
|
||||
const dir = path.dirname(file)
|
||||
|
||||
if (!(await pathExists(dir))) {
|
||||
await mkdir.mkdirs(dir)
|
||||
}
|
||||
|
||||
return fs.writeFile(file, data, encoding)
|
||||
}
|
||||
|
||||
function outputFileSync (file, ...args) {
|
||||
const dir = path.dirname(file)
|
||||
if (!fs.existsSync(dir)) {
|
||||
mkdir.mkdirsSync(dir)
|
||||
}
|
||||
|
||||
fs.writeFileSync(file, ...args)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
outputFile: u(outputFile),
|
||||
outputFileSync
|
||||
}
|
||||
12
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/path-exists/index.js
generated
vendored
Normal file
12
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/path-exists/index.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
'use strict'
|
||||
const u = require('universalify').fromPromise
|
||||
const fs = require('../fs')
|
||||
|
||||
function pathExists (path) {
|
||||
return fs.access(path).then(() => true).catch(() => false)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
pathExists: u(pathExists),
|
||||
pathExistsSync: fs.existsSync
|
||||
}
|
||||
17
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/remove/index.js
generated
vendored
Normal file
17
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/remove/index.js
generated
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
'use strict'
|
||||
|
||||
const fs = require('graceful-fs')
|
||||
const u = require('universalify').fromCallback
|
||||
|
||||
function remove (path, callback) {
|
||||
fs.rm(path, { recursive: true, force: true }, callback)
|
||||
}
|
||||
|
||||
function removeSync (path) {
|
||||
fs.rmSync(path, { recursive: true, force: true })
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
remove: u(remove),
|
||||
removeSync
|
||||
}
|
||||
29
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/util/async.js
generated
vendored
Normal file
29
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/util/async.js
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
'use strict'
|
||||
|
||||
// https://github.com/jprichardson/node-fs-extra/issues/1056
|
||||
// Performing parallel operations on each item of an async iterator is
|
||||
// surprisingly hard; you need to have handlers in place to avoid getting an
|
||||
// UnhandledPromiseRejectionWarning.
|
||||
// NOTE: This function does not presently handle return values, only errors
|
||||
async function asyncIteratorConcurrentProcess (iterator, fn) {
|
||||
const promises = []
|
||||
for await (const item of iterator) {
|
||||
promises.push(
|
||||
fn(item).then(
|
||||
() => null,
|
||||
(err) => err ?? new Error('unknown error')
|
||||
)
|
||||
)
|
||||
}
|
||||
await Promise.all(
|
||||
promises.map((promise) =>
|
||||
promise.then((possibleErr) => {
|
||||
if (possibleErr !== null) throw possibleErr
|
||||
})
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
asyncIteratorConcurrentProcess
|
||||
}
|
||||
159
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/util/stat.js
generated
vendored
Normal file
159
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/util/stat.js
generated
vendored
Normal file
@@ -0,0 +1,159 @@
|
||||
'use strict'
|
||||
|
||||
const fs = require('../fs')
|
||||
const path = require('path')
|
||||
const u = require('universalify').fromPromise
|
||||
|
||||
function getStats (src, dest, opts) {
|
||||
const statFunc = opts.dereference
|
||||
? (file) => fs.stat(file, { bigint: true })
|
||||
: (file) => fs.lstat(file, { bigint: true })
|
||||
return Promise.all([
|
||||
statFunc(src),
|
||||
statFunc(dest).catch(err => {
|
||||
if (err.code === 'ENOENT') return null
|
||||
throw err
|
||||
})
|
||||
]).then(([srcStat, destStat]) => ({ srcStat, destStat }))
|
||||
}
|
||||
|
||||
function getStatsSync (src, dest, opts) {
|
||||
let destStat
|
||||
const statFunc = opts.dereference
|
||||
? (file) => fs.statSync(file, { bigint: true })
|
||||
: (file) => fs.lstatSync(file, { bigint: true })
|
||||
const srcStat = statFunc(src)
|
||||
try {
|
||||
destStat = statFunc(dest)
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOENT') return { srcStat, destStat: null }
|
||||
throw err
|
||||
}
|
||||
return { srcStat, destStat }
|
||||
}
|
||||
|
||||
async function checkPaths (src, dest, funcName, opts) {
|
||||
const { srcStat, destStat } = await getStats(src, dest, opts)
|
||||
if (destStat) {
|
||||
if (areIdentical(srcStat, destStat)) {
|
||||
const srcBaseName = path.basename(src)
|
||||
const destBaseName = path.basename(dest)
|
||||
if (funcName === 'move' &&
|
||||
srcBaseName !== destBaseName &&
|
||||
srcBaseName.toLowerCase() === destBaseName.toLowerCase()) {
|
||||
return { srcStat, destStat, isChangingCase: true }
|
||||
}
|
||||
throw new Error('Source and destination must not be the same.')
|
||||
}
|
||||
if (srcStat.isDirectory() && !destStat.isDirectory()) {
|
||||
throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)
|
||||
}
|
||||
if (!srcStat.isDirectory() && destStat.isDirectory()) {
|
||||
throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`)
|
||||
}
|
||||
}
|
||||
|
||||
if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
|
||||
throw new Error(errMsg(src, dest, funcName))
|
||||
}
|
||||
|
||||
return { srcStat, destStat }
|
||||
}
|
||||
|
||||
function checkPathsSync (src, dest, funcName, opts) {
|
||||
const { srcStat, destStat } = getStatsSync(src, dest, opts)
|
||||
|
||||
if (destStat) {
|
||||
if (areIdentical(srcStat, destStat)) {
|
||||
const srcBaseName = path.basename(src)
|
||||
const destBaseName = path.basename(dest)
|
||||
if (funcName === 'move' &&
|
||||
srcBaseName !== destBaseName &&
|
||||
srcBaseName.toLowerCase() === destBaseName.toLowerCase()) {
|
||||
return { srcStat, destStat, isChangingCase: true }
|
||||
}
|
||||
throw new Error('Source and destination must not be the same.')
|
||||
}
|
||||
if (srcStat.isDirectory() && !destStat.isDirectory()) {
|
||||
throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)
|
||||
}
|
||||
if (!srcStat.isDirectory() && destStat.isDirectory()) {
|
||||
throw new Error(`Cannot overwrite directory '${dest}' with non-directory '${src}'.`)
|
||||
}
|
||||
}
|
||||
|
||||
if (srcStat.isDirectory() && isSrcSubdir(src, dest)) {
|
||||
throw new Error(errMsg(src, dest, funcName))
|
||||
}
|
||||
return { srcStat, destStat }
|
||||
}
|
||||
|
||||
// recursively check if dest parent is a subdirectory of src.
|
||||
// It works for all file types including symlinks since it
|
||||
// checks the src and dest inodes. It starts from the deepest
|
||||
// parent and stops once it reaches the src parent or the root path.
|
||||
async function checkParentPaths (src, srcStat, dest, funcName) {
|
||||
const srcParent = path.resolve(path.dirname(src))
|
||||
const destParent = path.resolve(path.dirname(dest))
|
||||
if (destParent === srcParent || destParent === path.parse(destParent).root) return
|
||||
|
||||
let destStat
|
||||
try {
|
||||
destStat = await fs.stat(destParent, { bigint: true })
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOENT') return
|
||||
throw err
|
||||
}
|
||||
|
||||
if (areIdentical(srcStat, destStat)) {
|
||||
throw new Error(errMsg(src, dest, funcName))
|
||||
}
|
||||
|
||||
return checkParentPaths(src, srcStat, destParent, funcName)
|
||||
}
|
||||
|
||||
function checkParentPathsSync (src, srcStat, dest, funcName) {
|
||||
const srcParent = path.resolve(path.dirname(src))
|
||||
const destParent = path.resolve(path.dirname(dest))
|
||||
if (destParent === srcParent || destParent === path.parse(destParent).root) return
|
||||
let destStat
|
||||
try {
|
||||
destStat = fs.statSync(destParent, { bigint: true })
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOENT') return
|
||||
throw err
|
||||
}
|
||||
if (areIdentical(srcStat, destStat)) {
|
||||
throw new Error(errMsg(src, dest, funcName))
|
||||
}
|
||||
return checkParentPathsSync(src, srcStat, destParent, funcName)
|
||||
}
|
||||
|
||||
function areIdentical (srcStat, destStat) {
|
||||
// stat.dev can be 0n on windows when node version >= 22.x.x
|
||||
return destStat.ino !== undefined && destStat.dev !== undefined && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev
|
||||
}
|
||||
|
||||
// return true if dest is a subdir of src, otherwise false.
|
||||
// It only checks the path strings.
|
||||
function isSrcSubdir (src, dest) {
|
||||
const srcArr = path.resolve(src).split(path.sep).filter(i => i)
|
||||
const destArr = path.resolve(dest).split(path.sep).filter(i => i)
|
||||
return srcArr.every((cur, i) => destArr[i] === cur)
|
||||
}
|
||||
|
||||
function errMsg (src, dest, funcName) {
|
||||
return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.`
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
// checkPaths
|
||||
checkPaths: u(checkPaths),
|
||||
checkPathsSync,
|
||||
// checkParent
|
||||
checkParentPaths: u(checkParentPaths),
|
||||
checkParentPathsSync,
|
||||
// Misc
|
||||
isSrcSubdir,
|
||||
areIdentical
|
||||
}
|
||||
36
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/util/utimes.js
generated
vendored
Normal file
36
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/lib/util/utimes.js
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
'use strict'
|
||||
|
||||
const fs = require('../fs')
|
||||
const u = require('universalify').fromPromise
|
||||
|
||||
async function utimesMillis (path, atime, mtime) {
|
||||
// if (!HAS_MILLIS_RES) return fs.utimes(path, atime, mtime, callback)
|
||||
const fd = await fs.open(path, 'r+')
|
||||
|
||||
let closeErr = null
|
||||
|
||||
try {
|
||||
await fs.futimes(fd, atime, mtime)
|
||||
} finally {
|
||||
try {
|
||||
await fs.close(fd)
|
||||
} catch (e) {
|
||||
closeErr = e
|
||||
}
|
||||
}
|
||||
|
||||
if (closeErr) {
|
||||
throw closeErr
|
||||
}
|
||||
}
|
||||
|
||||
function utimesMillisSync (path, atime, mtime) {
|
||||
const fd = fs.openSync(path, 'r+')
|
||||
fs.futimesSync(fd, atime, mtime)
|
||||
return fs.closeSync(fd)
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
utimesMillis: u(utimesMillis),
|
||||
utimesMillisSync
|
||||
}
|
||||
71
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/package.json
generated
vendored
Normal file
71
desktop-operator/node_modules/@electron/universal/node_modules/fs-extra/package.json
generated
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
{
|
||||
"name": "fs-extra",
|
||||
"version": "11.3.2",
|
||||
"description": "fs-extra contains methods that aren't included in the vanilla Node.js fs package. Such as recursive mkdir, copy, and remove.",
|
||||
"engines": {
|
||||
"node": ">=14.14"
|
||||
},
|
||||
"homepage": "https://github.com/jprichardson/node-fs-extra",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/jprichardson/node-fs-extra"
|
||||
},
|
||||
"keywords": [
|
||||
"fs",
|
||||
"file",
|
||||
"file system",
|
||||
"copy",
|
||||
"directory",
|
||||
"extra",
|
||||
"mkdirp",
|
||||
"mkdir",
|
||||
"mkdirs",
|
||||
"recursive",
|
||||
"json",
|
||||
"read",
|
||||
"write",
|
||||
"extra",
|
||||
"delete",
|
||||
"remove",
|
||||
"touch",
|
||||
"create",
|
||||
"text",
|
||||
"output",
|
||||
"move",
|
||||
"promise"
|
||||
],
|
||||
"author": "JP Richardson <jprichardson@gmail.com>",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"graceful-fs": "^4.2.0",
|
||||
"jsonfile": "^6.0.1",
|
||||
"universalify": "^2.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"klaw": "^2.1.1",
|
||||
"klaw-sync": "^3.0.2",
|
||||
"minimist": "^1.1.1",
|
||||
"mocha": "^10.1.0",
|
||||
"nyc": "^15.0.0",
|
||||
"proxyquire": "^2.0.1",
|
||||
"read-dir-files": "^0.1.1",
|
||||
"standard": "^17.0.0"
|
||||
},
|
||||
"main": "./lib/index.js",
|
||||
"exports": {
|
||||
".": "./lib/index.js",
|
||||
"./esm": "./lib/esm.mjs"
|
||||
},
|
||||
"files": [
|
||||
"lib/",
|
||||
"!lib/**/__tests__/"
|
||||
],
|
||||
"scripts": {
|
||||
"lint": "standard",
|
||||
"test-find": "find ./lib/**/__tests__ -name *.test.js | xargs mocha",
|
||||
"test": "npm run lint && npm run unit && npm run unit-esm",
|
||||
"unit": "nyc node test.js",
|
||||
"unit-esm": "node test.mjs"
|
||||
},
|
||||
"sideEffects": false
|
||||
}
|
||||
15
desktop-operator/node_modules/@electron/universal/node_modules/jsonfile/LICENSE
generated
vendored
Normal file
15
desktop-operator/node_modules/@electron/universal/node_modules/jsonfile/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
(The MIT License)
|
||||
|
||||
Copyright (c) 2012-2015, JP Richardson <jprichardson@gmail.com>
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files
|
||||
(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify,
|
||||
merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
|
||||
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
|
||||
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
|
||||
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
230
desktop-operator/node_modules/@electron/universal/node_modules/jsonfile/README.md
generated
vendored
Normal file
230
desktop-operator/node_modules/@electron/universal/node_modules/jsonfile/README.md
generated
vendored
Normal file
@@ -0,0 +1,230 @@
|
||||
Node.js - jsonfile
|
||||
================
|
||||
|
||||
Easily read/write JSON files in Node.js. _Note: this module cannot be used in the browser._
|
||||
|
||||
[](https://www.npmjs.org/package/jsonfile)
|
||||
[](https://github.com/jprichardson/node-jsonfile/actions?query=branch%3Amaster)
|
||||
[](https://ci.appveyor.com/project/jprichardson/node-jsonfile/branch/master)
|
||||
|
||||
<a href="https://github.com/feross/standard"><img src="https://cdn.rawgit.com/feross/standard/master/sticker.svg" alt="Standard JavaScript" width="100"></a>
|
||||
|
||||
Why?
|
||||
----
|
||||
|
||||
Writing `JSON.stringify()` and then `fs.writeFile()` and `JSON.parse()` with `fs.readFile()` enclosed in `try/catch` blocks became annoying.
|
||||
|
||||
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
npm install --save jsonfile
|
||||
|
||||
|
||||
|
||||
API
|
||||
---
|
||||
|
||||
* [`readFile(filename, [options], callback)`](#readfilefilename-options-callback)
|
||||
* [`readFileSync(filename, [options])`](#readfilesyncfilename-options)
|
||||
* [`writeFile(filename, obj, [options], callback)`](#writefilefilename-obj-options-callback)
|
||||
* [`writeFileSync(filename, obj, [options])`](#writefilesyncfilename-obj-options)
|
||||
|
||||
----
|
||||
|
||||
### readFile(filename, [options], callback)
|
||||
|
||||
`options` (`object`, default `undefined`): Pass in any [`fs.readFile`](https://nodejs.org/api/fs.html#fs_fs_readfile_path_options_callback) options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse).
|
||||
- `throws` (`boolean`, default: `true`). If `JSON.parse` throws an error, pass this error to the callback.
|
||||
If `false`, returns `null` for the object.
|
||||
|
||||
|
||||
```js
|
||||
const jsonfile = require('jsonfile')
|
||||
const file = '/tmp/data.json'
|
||||
jsonfile.readFile(file, function (err, obj) {
|
||||
if (err) console.error(err)
|
||||
console.dir(obj)
|
||||
})
|
||||
```
|
||||
|
||||
You can also use this method with promises. The `readFile` method will return a promise if you do not pass a callback function.
|
||||
|
||||
```js
|
||||
const jsonfile = require('jsonfile')
|
||||
const file = '/tmp/data.json'
|
||||
jsonfile.readFile(file)
|
||||
.then(obj => console.dir(obj))
|
||||
.catch(error => console.error(error))
|
||||
```
|
||||
|
||||
----
|
||||
|
||||
### readFileSync(filename, [options])
|
||||
|
||||
`options` (`object`, default `undefined`): Pass in any [`fs.readFileSync`](https://nodejs.org/api/fs.html#fs_fs_readfilesync_path_options) options or set `reviver` for a [JSON reviver](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse).
|
||||
- `throws` (`boolean`, default: `true`). If an error is encountered reading or parsing the file, throw the error. If `false`, returns `null` for the object.
|
||||
|
||||
```js
|
||||
const jsonfile = require('jsonfile')
|
||||
const file = '/tmp/data.json'
|
||||
|
||||
console.dir(jsonfile.readFileSync(file))
|
||||
```
|
||||
|
||||
----
|
||||
|
||||
### writeFile(filename, obj, [options], callback)
|
||||
|
||||
`options`: Pass in any [`fs.writeFile`](https://nodejs.org/api/fs.html#fs_fs_writefile_file_data_options_callback) options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces`, or override `EOL` string or set `finalEOL` flag as `false` to not save the file with `EOL` at the end.
|
||||
|
||||
|
||||
```js
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
const file = '/tmp/data.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFile(file, obj, function (err) {
|
||||
if (err) console.error(err)
|
||||
})
|
||||
```
|
||||
Or use with promises as follows:
|
||||
|
||||
```js
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
const file = '/tmp/data.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFile(file, obj)
|
||||
.then(res => {
|
||||
console.log('Write complete')
|
||||
})
|
||||
.catch(error => console.error(error))
|
||||
```
|
||||
|
||||
|
||||
**formatting with spaces:**
|
||||
|
||||
```js
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
const file = '/tmp/data.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFile(file, obj, { spaces: 2 }, function (err) {
|
||||
if (err) console.error(err)
|
||||
})
|
||||
```
|
||||
|
||||
**overriding EOL:**
|
||||
|
||||
```js
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
const file = '/tmp/data.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFile(file, obj, { spaces: 2, EOL: '\r\n' }, function (err) {
|
||||
if (err) console.error(err)
|
||||
})
|
||||
```
|
||||
|
||||
|
||||
**disabling the EOL at the end of file:**
|
||||
|
||||
```js
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
const file = '/tmp/data.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFile(file, obj, { spaces: 2, finalEOL: false }, function (err) {
|
||||
if (err) console.log(err)
|
||||
})
|
||||
```
|
||||
|
||||
**appending to an existing JSON file:**
|
||||
|
||||
You can use `fs.writeFile` option `{ flag: 'a' }` to achieve this.
|
||||
|
||||
```js
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
const file = '/tmp/mayAlreadyExistedData.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFile(file, obj, { flag: 'a' }, function (err) {
|
||||
if (err) console.error(err)
|
||||
})
|
||||
```
|
||||
|
||||
----
|
||||
|
||||
### writeFileSync(filename, obj, [options])
|
||||
|
||||
`options`: Pass in any [`fs.writeFileSync`](https://nodejs.org/api/fs.html#fs_fs_writefilesync_file_data_options) options or set `replacer` for a [JSON replacer](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify). Can also pass in `spaces`, or override `EOL` string or set `finalEOL` flag as `false` to not save the file with `EOL` at the end.
|
||||
|
||||
```js
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
const file = '/tmp/data.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFileSync(file, obj)
|
||||
```
|
||||
|
||||
**formatting with spaces:**
|
||||
|
||||
```js
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
const file = '/tmp/data.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFileSync(file, obj, { spaces: 2 })
|
||||
```
|
||||
|
||||
**overriding EOL:**
|
||||
|
||||
```js
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
const file = '/tmp/data.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFileSync(file, obj, { spaces: 2, EOL: '\r\n' })
|
||||
```
|
||||
|
||||
**disabling the EOL at the end of file:**
|
||||
|
||||
```js
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
const file = '/tmp/data.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFileSync(file, obj, { spaces: 2, finalEOL: false })
|
||||
```
|
||||
|
||||
**appending to an existing JSON file:**
|
||||
|
||||
You can use `fs.writeFileSync` option `{ flag: 'a' }` to achieve this.
|
||||
|
||||
```js
|
||||
const jsonfile = require('jsonfile')
|
||||
|
||||
const file = '/tmp/mayAlreadyExistedData.json'
|
||||
const obj = { name: 'JP' }
|
||||
|
||||
jsonfile.writeFileSync(file, obj, { flag: 'a' })
|
||||
```
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
(MIT License)
|
||||
|
||||
Copyright 2012-2016, JP Richardson <jprichardson@gmail.com>
|
||||
88
desktop-operator/node_modules/@electron/universal/node_modules/jsonfile/index.js
generated
vendored
Normal file
88
desktop-operator/node_modules/@electron/universal/node_modules/jsonfile/index.js
generated
vendored
Normal file
@@ -0,0 +1,88 @@
|
||||
let _fs
|
||||
try {
|
||||
_fs = require('graceful-fs')
|
||||
} catch (_) {
|
||||
_fs = require('fs')
|
||||
}
|
||||
const universalify = require('universalify')
|
||||
const { stringify, stripBom } = require('./utils')
|
||||
|
||||
async function _readFile (file, options = {}) {
|
||||
if (typeof options === 'string') {
|
||||
options = { encoding: options }
|
||||
}
|
||||
|
||||
const fs = options.fs || _fs
|
||||
|
||||
const shouldThrow = 'throws' in options ? options.throws : true
|
||||
|
||||
let data = await universalify.fromCallback(fs.readFile)(file, options)
|
||||
|
||||
data = stripBom(data)
|
||||
|
||||
let obj
|
||||
try {
|
||||
obj = JSON.parse(data, options ? options.reviver : null)
|
||||
} catch (err) {
|
||||
if (shouldThrow) {
|
||||
err.message = `${file}: ${err.message}`
|
||||
throw err
|
||||
} else {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
return obj
|
||||
}
|
||||
|
||||
const readFile = universalify.fromPromise(_readFile)
|
||||
|
||||
function readFileSync (file, options = {}) {
|
||||
if (typeof options === 'string') {
|
||||
options = { encoding: options }
|
||||
}
|
||||
|
||||
const fs = options.fs || _fs
|
||||
|
||||
const shouldThrow = 'throws' in options ? options.throws : true
|
||||
|
||||
try {
|
||||
let content = fs.readFileSync(file, options)
|
||||
content = stripBom(content)
|
||||
return JSON.parse(content, options.reviver)
|
||||
} catch (err) {
|
||||
if (shouldThrow) {
|
||||
err.message = `${file}: ${err.message}`
|
||||
throw err
|
||||
} else {
|
||||
return null
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function _writeFile (file, obj, options = {}) {
|
||||
const fs = options.fs || _fs
|
||||
|
||||
const str = stringify(obj, options)
|
||||
|
||||
await universalify.fromCallback(fs.writeFile)(file, str, options)
|
||||
}
|
||||
|
||||
const writeFile = universalify.fromPromise(_writeFile)
|
||||
|
||||
function writeFileSync (file, obj, options = {}) {
|
||||
const fs = options.fs || _fs
|
||||
|
||||
const str = stringify(obj, options)
|
||||
// not sure if fs.writeFileSync returns anything, but just in case
|
||||
return fs.writeFileSync(file, str, options)
|
||||
}
|
||||
|
||||
// NOTE: do not change this export format; required for ESM compat
|
||||
// see https://github.com/jprichardson/node-jsonfile/pull/162 for details
|
||||
module.exports = {
|
||||
readFile,
|
||||
readFileSync,
|
||||
writeFile,
|
||||
writeFileSync
|
||||
}
|
||||
40
desktop-operator/node_modules/@electron/universal/node_modules/jsonfile/package.json
generated
vendored
Normal file
40
desktop-operator/node_modules/@electron/universal/node_modules/jsonfile/package.json
generated
vendored
Normal file
@@ -0,0 +1,40 @@
|
||||
{
|
||||
"name": "jsonfile",
|
||||
"version": "6.2.0",
|
||||
"description": "Easily read/write JSON files.",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git@github.com:jprichardson/node-jsonfile.git"
|
||||
},
|
||||
"keywords": [
|
||||
"read",
|
||||
"write",
|
||||
"file",
|
||||
"json",
|
||||
"fs",
|
||||
"fs-extra"
|
||||
],
|
||||
"author": "JP Richardson <jprichardson@gmail.com>",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"universalify": "^2.0.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"graceful-fs": "^4.1.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"mocha": "^8.2.0",
|
||||
"rimraf": "^2.4.0",
|
||||
"standard": "^16.0.1"
|
||||
},
|
||||
"main": "index.js",
|
||||
"files": [
|
||||
"index.js",
|
||||
"utils.js"
|
||||
],
|
||||
"scripts": {
|
||||
"lint": "standard",
|
||||
"test": "npm run lint && npm run unit",
|
||||
"unit": "mocha"
|
||||
}
|
||||
}
|
||||
14
desktop-operator/node_modules/@electron/universal/node_modules/jsonfile/utils.js
generated
vendored
Normal file
14
desktop-operator/node_modules/@electron/universal/node_modules/jsonfile/utils.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
function stringify (obj, { EOL = '\n', finalEOL = true, replacer = null, spaces } = {}) {
|
||||
const EOF = finalEOL ? EOL : ''
|
||||
const str = JSON.stringify(obj, replacer, spaces)
|
||||
|
||||
return str.replace(/\n/g, EOL) + EOF
|
||||
}
|
||||
|
||||
function stripBom (content) {
|
||||
// we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified
|
||||
if (Buffer.isBuffer(content)) content = content.toString('utf8')
|
||||
return content.replace(/^\uFEFF/, '')
|
||||
}
|
||||
|
||||
module.exports = { stringify, stripBom }
|
||||
15
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/LICENSE
generated
vendored
Normal file
15
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
The ISC License
|
||||
|
||||
Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors
|
||||
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
copyright notice and this permission notice appear in all copies.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
|
||||
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
454
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/README.md
generated
vendored
Normal file
454
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/README.md
generated
vendored
Normal file
@@ -0,0 +1,454 @@
|
||||
# minimatch
|
||||
|
||||
A minimal matching utility.
|
||||
|
||||
This is the matching library used internally by npm.
|
||||
|
||||
It works by converting glob expressions into JavaScript `RegExp`
|
||||
objects.
|
||||
|
||||
## Usage
|
||||
|
||||
```js
|
||||
// hybrid module, load with require() or import
|
||||
import { minimatch } from 'minimatch'
|
||||
// or:
|
||||
const { minimatch } = require('minimatch')
|
||||
|
||||
minimatch('bar.foo', '*.foo') // true!
|
||||
minimatch('bar.foo', '*.bar') // false!
|
||||
minimatch('bar.foo', '*.+(bar|foo)', { debug: true }) // true, and noisy!
|
||||
```
|
||||
|
||||
## Features
|
||||
|
||||
Supports these glob features:
|
||||
|
||||
- Brace Expansion
|
||||
- Extended glob matching
|
||||
- "Globstar" `**` matching
|
||||
- [Posix character
|
||||
classes](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html),
|
||||
like `[[:alpha:]]`, supporting the full range of Unicode
|
||||
characters. For example, `[[:alpha:]]` will match against
|
||||
`'é'`, though `[a-zA-Z]` will not. Collating symbol and set
|
||||
matching is not supported, so `[[=e=]]` will _not_ match `'é'`
|
||||
and `[[.ch.]]` will not match `'ch'` in locales where `ch` is
|
||||
considered a single character.
|
||||
|
||||
See:
|
||||
|
||||
- `man sh`
|
||||
- `man bash` [Pattern
|
||||
Matching](https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html)
|
||||
- `man 3 fnmatch`
|
||||
- `man 5 gitignore`
|
||||
|
||||
## Windows
|
||||
|
||||
**Please only use forward-slashes in glob expressions.**
|
||||
|
||||
Though windows uses either `/` or `\` as its path separator, only `/`
|
||||
characters are used by this glob implementation. You must use
|
||||
forward-slashes **only** in glob expressions. Back-slashes in patterns
|
||||
will always be interpreted as escape characters, not path separators.
|
||||
|
||||
Note that `\` or `/` _will_ be interpreted as path separators in paths on
|
||||
Windows, and will match against `/` in glob expressions.
|
||||
|
||||
So just always use `/` in patterns.
|
||||
|
||||
### UNC Paths
|
||||
|
||||
On Windows, UNC paths like `//?/c:/...` or
|
||||
`//ComputerName/Share/...` are handled specially.
|
||||
|
||||
- Patterns starting with a double-slash followed by some
|
||||
non-slash characters will preserve their double-slash. As a
|
||||
result, a pattern like `//*` will match `//x`, but not `/x`.
|
||||
- Patterns staring with `//?/<drive letter>:` will _not_ treat
|
||||
the `?` as a wildcard character. Instead, it will be treated
|
||||
as a normal string.
|
||||
- Patterns starting with `//?/<drive letter>:/...` will match
|
||||
file paths starting with `<drive letter>:/...`, and vice versa,
|
||||
as if the `//?/` was not present. This behavior only is
|
||||
present when the drive letters are a case-insensitive match to
|
||||
one another. The remaining portions of the path/pattern are
|
||||
compared case sensitively, unless `nocase:true` is set.
|
||||
|
||||
Note that specifying a UNC path using `\` characters as path
|
||||
separators is always allowed in the file path argument, but only
|
||||
allowed in the pattern argument when `windowsPathsNoEscape: true`
|
||||
is set in the options.
|
||||
|
||||
## Minimatch Class
|
||||
|
||||
Create a minimatch object by instantiating the `minimatch.Minimatch` class.
|
||||
|
||||
```javascript
|
||||
var Minimatch = require('minimatch').Minimatch
|
||||
var mm = new Minimatch(pattern, options)
|
||||
```
|
||||
|
||||
### Properties
|
||||
|
||||
- `pattern` The original pattern the minimatch object represents.
|
||||
- `options` The options supplied to the constructor.
|
||||
- `set` A 2-dimensional array of regexp or string expressions.
|
||||
Each row in the
|
||||
array corresponds to a brace-expanded pattern. Each item in the row
|
||||
corresponds to a single path-part. For example, the pattern
|
||||
`{a,b/c}/d` would expand to a set of patterns like:
|
||||
|
||||
[ [ a, d ]
|
||||
, [ b, c, d ] ]
|
||||
|
||||
If a portion of the pattern doesn't have any "magic" in it
|
||||
(that is, it's something like `"foo"` rather than `fo*o?`), then it
|
||||
will be left as a string rather than converted to a regular
|
||||
expression.
|
||||
|
||||
- `regexp` Created by the `makeRe` method. A single regular expression
|
||||
expressing the entire pattern. This is useful in cases where you wish
|
||||
to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled.
|
||||
- `negate` True if the pattern is negated.
|
||||
- `comment` True if the pattern is a comment.
|
||||
- `empty` True if the pattern is `""`.
|
||||
|
||||
### Methods
|
||||
|
||||
- `makeRe()` Generate the `regexp` member if necessary, and return it.
|
||||
Will return `false` if the pattern is invalid.
|
||||
- `match(fname)` Return true if the filename matches the pattern, or
|
||||
false otherwise.
|
||||
- `matchOne(fileArray, patternArray, partial)` Take a `/`-split
|
||||
filename, and match it against a single row in the `regExpSet`. This
|
||||
method is mainly for internal use, but is exposed so that it can be
|
||||
used by a glob-walker that needs to avoid excessive filesystem calls.
|
||||
- `hasMagic()` Returns true if the parsed pattern contains any
|
||||
magic characters. Returns false if all comparator parts are
|
||||
string literals. If the `magicalBraces` option is set on the
|
||||
constructor, then it will consider brace expansions which are
|
||||
not otherwise magical to be magic. If not set, then a pattern
|
||||
like `a{b,c}d` will return `false`, because neither `abd` nor
|
||||
`acd` contain any special glob characters.
|
||||
|
||||
This does **not** mean that the pattern string can be used as a
|
||||
literal filename, as it may contain magic glob characters that
|
||||
are escaped. For example, the pattern `\\*` or `[*]` would not
|
||||
be considered to have magic, as the matching portion parses to
|
||||
the literal string `'*'` and would match a path named `'*'`,
|
||||
not `'\\*'` or `'[*]'`. The `minimatch.unescape()` method may
|
||||
be used to remove escape characters.
|
||||
|
||||
All other methods are internal, and will be called as necessary.
|
||||
|
||||
### minimatch(path, pattern, options)
|
||||
|
||||
Main export. Tests a path against the pattern using the options.
|
||||
|
||||
```javascript
|
||||
var isJS = minimatch(file, '*.js', { matchBase: true })
|
||||
```
|
||||
|
||||
### minimatch.filter(pattern, options)
|
||||
|
||||
Returns a function that tests its
|
||||
supplied argument, suitable for use with `Array.filter`. Example:
|
||||
|
||||
```javascript
|
||||
var javascripts = fileList.filter(minimatch.filter('*.js', { matchBase: true }))
|
||||
```
|
||||
|
||||
### minimatch.escape(pattern, options = {})
|
||||
|
||||
Escape all magic characters in a glob pattern, so that it will
|
||||
only ever match literal strings
|
||||
|
||||
If the `windowsPathsNoEscape` option is used, then characters are
|
||||
escaped by wrapping in `[]`, because a magic character wrapped in
|
||||
a character class can only be satisfied by that exact character.
|
||||
|
||||
Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot
|
||||
be escaped or unescaped.
|
||||
|
||||
### minimatch.unescape(pattern, options = {})
|
||||
|
||||
Un-escape a glob string that may contain some escaped characters.
|
||||
|
||||
If the `windowsPathsNoEscape` option is used, then square-brace
|
||||
escapes are removed, but not backslash escapes. For example, it
|
||||
will turn the string `'[*]'` into `*`, but it will not turn
|
||||
`'\\*'` into `'*'`, because `\` is a path separator in
|
||||
`windowsPathsNoEscape` mode.
|
||||
|
||||
When `windowsPathsNoEscape` is not set, then both brace escapes
|
||||
and backslash escapes are removed.
|
||||
|
||||
Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot
|
||||
be escaped or unescaped.
|
||||
|
||||
### minimatch.match(list, pattern, options)
|
||||
|
||||
Match against the list of
|
||||
files, in the style of fnmatch or glob. If nothing is matched, and
|
||||
options.nonull is set, then return a list containing the pattern itself.
|
||||
|
||||
```javascript
|
||||
var javascripts = minimatch.match(fileList, '*.js', { matchBase: true })
|
||||
```
|
||||
|
||||
### minimatch.makeRe(pattern, options)
|
||||
|
||||
Make a regular expression object from the pattern.
|
||||
|
||||
## Options
|
||||
|
||||
All options are `false` by default.
|
||||
|
||||
### debug
|
||||
|
||||
Dump a ton of stuff to stderr.
|
||||
|
||||
### nobrace
|
||||
|
||||
Do not expand `{a,b}` and `{1..3}` brace sets.
|
||||
|
||||
### noglobstar
|
||||
|
||||
Disable `**` matching against multiple folder names.
|
||||
|
||||
### dot
|
||||
|
||||
Allow patterns to match filenames starting with a period, even if
|
||||
the pattern does not explicitly have a period in that spot.
|
||||
|
||||
Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot`
|
||||
is set.
|
||||
|
||||
### noext
|
||||
|
||||
Disable "extglob" style patterns like `+(a|b)`.
|
||||
|
||||
### nocase
|
||||
|
||||
Perform a case-insensitive match.
|
||||
|
||||
### nocaseMagicOnly
|
||||
|
||||
When used with `{nocase: true}`, create regular expressions that
|
||||
are case-insensitive, but leave string match portions untouched.
|
||||
Has no effect when used without `{nocase: true}`
|
||||
|
||||
Useful when some other form of case-insensitive matching is used,
|
||||
or if the original string representation is useful in some other
|
||||
way.
|
||||
|
||||
### nonull
|
||||
|
||||
When a match is not found by `minimatch.match`, return a list containing
|
||||
the pattern itself if this option is set. When not set, an empty list
|
||||
is returned if there are no matches.
|
||||
|
||||
### magicalBraces
|
||||
|
||||
This only affects the results of the `Minimatch.hasMagic` method.
|
||||
|
||||
If the pattern contains brace expansions, such as `a{b,c}d`, but
|
||||
no other magic characters, then the `Minimatch.hasMagic()` method
|
||||
will return `false` by default. When this option set, it will
|
||||
return `true` for brace expansion as well as other magic glob
|
||||
characters.
|
||||
|
||||
### matchBase
|
||||
|
||||
If set, then patterns without slashes will be matched
|
||||
against the basename of the path if it contains slashes. For example,
|
||||
`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.
|
||||
|
||||
### nocomment
|
||||
|
||||
Suppress the behavior of treating `#` at the start of a pattern as a
|
||||
comment.
|
||||
|
||||
### nonegate
|
||||
|
||||
Suppress the behavior of treating a leading `!` character as negation.
|
||||
|
||||
### flipNegate
|
||||
|
||||
Returns from negate expressions the same as if they were not negated.
|
||||
(Ie, true on a hit, false on a miss.)
|
||||
|
||||
### partial
|
||||
|
||||
Compare a partial path to a pattern. As long as the parts of the path that
|
||||
are present are not contradicted by the pattern, it will be treated as a
|
||||
match. This is useful in applications where you're walking through a
|
||||
folder structure, and don't yet have the full path, but want to ensure that
|
||||
you do not walk down paths that can never be a match.
|
||||
|
||||
For example,
|
||||
|
||||
```js
|
||||
minimatch('/a/b', '/a/*/c/d', { partial: true }) // true, might be /a/b/c/d
|
||||
minimatch('/a/b', '/**/d', { partial: true }) // true, might be /a/b/.../d
|
||||
minimatch('/x/y/z', '/a/**/z', { partial: true }) // false, because x !== a
|
||||
```
|
||||
|
||||
### windowsPathsNoEscape
|
||||
|
||||
Use `\\` as a path separator _only_, and _never_ as an escape
|
||||
character. If set, all `\\` characters are replaced with `/` in
|
||||
the pattern. Note that this makes it **impossible** to match
|
||||
against paths containing literal glob pattern characters, but
|
||||
allows matching with patterns constructed using `path.join()` and
|
||||
`path.resolve()` on Windows platforms, mimicking the (buggy!)
|
||||
behavior of earlier versions on Windows. Please use with
|
||||
caution, and be mindful of [the caveat about Windows
|
||||
paths](#windows).
|
||||
|
||||
For legacy reasons, this is also set if
|
||||
`options.allowWindowsEscape` is set to the exact value `false`.
|
||||
|
||||
### windowsNoMagicRoot
|
||||
|
||||
When a pattern starts with a UNC path or drive letter, and in
|
||||
`nocase:true` mode, do not convert the root portions of the
|
||||
pattern into a case-insensitive regular expression, and instead
|
||||
leave them as strings.
|
||||
|
||||
This is the default when the platform is `win32` and
|
||||
`nocase:true` is set.
|
||||
|
||||
### preserveMultipleSlashes
|
||||
|
||||
By default, multiple `/` characters (other than the leading `//`
|
||||
in a UNC path, see "UNC Paths" above) are treated as a single
|
||||
`/`.
|
||||
|
||||
That is, a pattern like `a///b` will match the file path `a/b`.
|
||||
|
||||
Set `preserveMultipleSlashes: true` to suppress this behavior.
|
||||
|
||||
### optimizationLevel
|
||||
|
||||
A number indicating the level of optimization that should be done
|
||||
to the pattern prior to parsing and using it for matches.
|
||||
|
||||
Globstar parts `**` are always converted to `*` when `noglobstar`
|
||||
is set, and multiple adjacent `**` parts are converted into a
|
||||
single `**` (ie, `a/**/**/b` will be treated as `a/**/b`, as this
|
||||
is equivalent in all cases).
|
||||
|
||||
- `0` - Make no further changes. In this mode, `.` and `..` are
|
||||
maintained in the pattern, meaning that they must also appear
|
||||
in the same position in the test path string. Eg, a pattern
|
||||
like `a/*/../c` will match the string `a/b/../c` but not the
|
||||
string `a/c`.
|
||||
- `1` - (default) Remove cases where a double-dot `..` follows a
|
||||
pattern portion that is not `**`, `.`, `..`, or empty `''`. For
|
||||
example, the pattern `./a/b/../*` is converted to `./a/*`, and
|
||||
so it will match the path string `./a/c`, but not the path
|
||||
string `./a/b/../c`. Dots and empty path portions in the
|
||||
pattern are preserved.
|
||||
- `2` (or higher) - Much more aggressive optimizations, suitable
|
||||
for use with file-walking cases:
|
||||
|
||||
- Remove cases where a double-dot `..` follows a pattern
|
||||
portion that is not `**`, `.`, or empty `''`. Remove empty
|
||||
and `.` portions of the pattern, where safe to do so (ie,
|
||||
anywhere other than the last position, the first position, or
|
||||
the second position in a pattern starting with `/`, as this
|
||||
may indicate a UNC path on Windows).
|
||||
- Convert patterns containing `<pre>/**/../<p>/<rest>` into the
|
||||
equivalent `<pre>/{..,**}/<p>/<rest>`, where `<p>` is a
|
||||
a pattern portion other than `.`, `..`, `**`, or empty
|
||||
`''`.
|
||||
- Dedupe patterns where a `**` portion is present in one and
|
||||
omitted in another, and it is not the final path portion, and
|
||||
they are otherwise equivalent. So `{a/**/b,a/b}` becomes
|
||||
`a/**/b`, because `**` matches against an empty path portion.
|
||||
- Dedupe patterns where a `*` portion is present in one, and a
|
||||
non-dot pattern other than `**`, `.`, `..`, or `''` is in the
|
||||
same position in the other. So `a/{*,x}/b` becomes `a/*/b`,
|
||||
because `*` can match against `x`.
|
||||
|
||||
While these optimizations improve the performance of
|
||||
file-walking use cases such as [glob](http://npm.im/glob) (ie,
|
||||
the reason this module exists), there are cases where it will
|
||||
fail to match a literal string that would have been matched in
|
||||
optimization level 1 or 0.
|
||||
|
||||
Specifically, while the `Minimatch.match()` method will
|
||||
optimize the file path string in the same ways, resulting in
|
||||
the same matches, it will fail when tested with the regular
|
||||
expression provided by `Minimatch.makeRe()`, unless the path
|
||||
string is first processed with
|
||||
`minimatch.levelTwoFileOptimize()` or similar.
|
||||
|
||||
### platform
|
||||
|
||||
When set to `win32`, this will trigger all windows-specific
|
||||
behaviors (special handling for UNC paths, and treating `\` as
|
||||
separators in file paths for comparison.)
|
||||
|
||||
Defaults to the value of `process.platform`.
|
||||
|
||||
## Comparisons to other fnmatch/glob implementations
|
||||
|
||||
While strict compliance with the existing standards is a
|
||||
worthwhile goal, some discrepancies exist between minimatch and
|
||||
other implementations. Some are intentional, and some are
|
||||
unavoidable.
|
||||
|
||||
If the pattern starts with a `!` character, then it is negated. Set the
|
||||
`nonegate` flag to suppress this behavior, and treat leading `!`
|
||||
characters normally. This is perhaps relevant if you wish to start the
|
||||
pattern with a negative extglob pattern like `!(a|B)`. Multiple `!`
|
||||
characters at the start of a pattern will negate the pattern multiple
|
||||
times.
|
||||
|
||||
If a pattern starts with `#`, then it is treated as a comment, and
|
||||
will not match anything. Use `\#` to match a literal `#` at the
|
||||
start of a line, or set the `nocomment` flag to suppress this behavior.
|
||||
|
||||
The double-star character `**` is supported by default, unless the
|
||||
`noglobstar` flag is set. This is supported in the manner of bsdglob
|
||||
and bash 4.1, where `**` only has special significance if it is the only
|
||||
thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but
|
||||
`a/**b` will not.
|
||||
|
||||
If an escaped pattern has no matches, and the `nonull` flag is set,
|
||||
then minimatch.match returns the pattern as-provided, rather than
|
||||
interpreting the character escapes. For example,
|
||||
`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than
|
||||
`"*a?"`. This is akin to setting the `nullglob` option in bash, except
|
||||
that it does not resolve escaped pattern characters.
|
||||
|
||||
If brace expansion is not disabled, then it is performed before any
|
||||
other interpretation of the glob pattern. Thus, a pattern like
|
||||
`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded
|
||||
**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are
|
||||
checked for validity. Since those two are valid, matching proceeds.
|
||||
|
||||
Negated extglob patterns are handled as closely as possible to
|
||||
Bash semantics, but there are some cases with negative extglobs
|
||||
which are exceedingly difficult to express in a JavaScript
|
||||
regular expression. In particular the negated pattern
|
||||
`<start>!(<pattern>*|)*` will in bash match anything that does
|
||||
not start with `<start><pattern>`. However,
|
||||
`<start>!(<pattern>*)*` _will_ match paths starting with
|
||||
`<start><pattern>`, because the empty string can match against
|
||||
the negated portion. In this library, `<start>!(<pattern>*|)*`
|
||||
will _not_ match any pattern starting with `<start>`, due to a
|
||||
difference in precisely which patterns are considered "greedy" in
|
||||
Regular Expressions vs bash path expansion. This may be fixable,
|
||||
but not without incurring some complexity and performance costs,
|
||||
and the trade-off seems to not be worth pursuing.
|
||||
|
||||
Note that `fnmatch(3)` in libc is an extremely naive string comparison
|
||||
matcher, which does not do anything special for slashes. This library is
|
||||
designed to be used in glob searching and file walkers, and so it does do
|
||||
special things with `/`. Thus, `foo*` will not match `foo/bar` in this
|
||||
library, even though it would in `fnmatch(3)`.
|
||||
2
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/assert-valid-pattern.d.ts
generated
vendored
Normal file
2
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/assert-valid-pattern.d.ts
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
export declare const assertValidPattern: (pattern: any) => void;
|
||||
//# sourceMappingURL=assert-valid-pattern.d.ts.map
|
||||
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/assert-valid-pattern.d.ts.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/assert-valid-pattern.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"assert-valid-pattern.d.ts","sourceRoot":"","sources":["../../src/assert-valid-pattern.ts"],"names":[],"mappings":"AACA,eAAO,MAAM,kBAAkB,EAAE,CAAC,OAAO,EAAE,GAAG,KAAK,IAUlD,CAAA"}
|
||||
14
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
generated
vendored
Normal file
14
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.assertValidPattern = void 0;
|
||||
const MAX_PATTERN_LENGTH = 1024 * 64;
|
||||
const assertValidPattern = (pattern) => {
|
||||
if (typeof pattern !== 'string') {
|
||||
throw new TypeError('invalid pattern');
|
||||
}
|
||||
if (pattern.length > MAX_PATTERN_LENGTH) {
|
||||
throw new TypeError('pattern is too long');
|
||||
}
|
||||
};
|
||||
exports.assertValidPattern = assertValidPattern;
|
||||
//# sourceMappingURL=assert-valid-pattern.js.map
|
||||
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"assert-valid-pattern.js","sourceRoot":"","sources":["../../src/assert-valid-pattern.ts"],"names":[],"mappings":";;;AAAA,MAAM,kBAAkB,GAAG,IAAI,GAAG,EAAE,CAAA;AAC7B,MAAM,kBAAkB,GAA2B,CACxD,OAAY,EACe,EAAE;IAC7B,IAAI,OAAO,OAAO,KAAK,QAAQ,EAAE;QAC/B,MAAM,IAAI,SAAS,CAAC,iBAAiB,CAAC,CAAA;KACvC;IAED,IAAI,OAAO,CAAC,MAAM,GAAG,kBAAkB,EAAE;QACvC,MAAM,IAAI,SAAS,CAAC,qBAAqB,CAAC,CAAA;KAC3C;AACH,CAAC,CAAA;AAVY,QAAA,kBAAkB,sBAU9B","sourcesContent":["const MAX_PATTERN_LENGTH = 1024 * 64\nexport const assertValidPattern: (pattern: any) => void = (\n pattern: any\n): asserts pattern is string => {\n if (typeof pattern !== 'string') {\n throw new TypeError('invalid pattern')\n }\n\n if (pattern.length > MAX_PATTERN_LENGTH) {\n throw new TypeError('pattern is too long')\n }\n}\n"]}
|
||||
20
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/ast.d.ts
generated
vendored
Normal file
20
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/ast.d.ts
generated
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
import { MinimatchOptions, MMRegExp } from './index.js';
|
||||
export type ExtglobType = '!' | '?' | '+' | '*' | '@';
|
||||
export declare class AST {
|
||||
#private;
|
||||
type: ExtglobType | null;
|
||||
constructor(type: ExtglobType | null, parent?: AST, options?: MinimatchOptions);
|
||||
get hasMagic(): boolean | undefined;
|
||||
toString(): string;
|
||||
push(...parts: (string | AST)[]): void;
|
||||
toJSON(): any[];
|
||||
isStart(): boolean;
|
||||
isEnd(): boolean;
|
||||
copyIn(part: AST | string): void;
|
||||
clone(parent: AST): AST;
|
||||
static fromGlob(pattern: string, options?: MinimatchOptions): AST;
|
||||
toMMPattern(): MMRegExp | string;
|
||||
get options(): MinimatchOptions;
|
||||
toRegExpSource(allowDot?: boolean): [re: string, body: string, hasMagic: boolean, uflag: boolean];
|
||||
}
|
||||
//# sourceMappingURL=ast.d.ts.map
|
||||
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/ast.d.ts.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/ast.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"ast.d.ts","sourceRoot":"","sources":["../../src/ast.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,gBAAgB,EAAE,QAAQ,EAAE,MAAM,YAAY,CAAA;AAwCvD,MAAM,MAAM,WAAW,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,CAAA;AAkCrD,qBAAa,GAAG;;IACd,IAAI,EAAE,WAAW,GAAG,IAAI,CAAA;gBAiBtB,IAAI,EAAE,WAAW,GAAG,IAAI,EACxB,MAAM,CAAC,EAAE,GAAG,EACZ,OAAO,GAAE,gBAAqB;IAahC,IAAI,QAAQ,IAAI,OAAO,GAAG,SAAS,CAUlC;IAGD,QAAQ,IAAI,MAAM;IA+ClB,IAAI,CAAC,GAAG,KAAK,EAAE,CAAC,MAAM,GAAG,GAAG,CAAC,EAAE;IAY/B,MAAM;IAgBN,OAAO,IAAI,OAAO;IAgBlB,KAAK,IAAI,OAAO;IAYhB,MAAM,CAAC,IAAI,EAAE,GAAG,GAAG,MAAM;IAKzB,KAAK,CAAC,MAAM,EAAE,GAAG;IAsIjB,MAAM,CAAC,QAAQ,CAAC,OAAO,EAAE,MAAM,EAAE,OAAO,GAAE,gBAAqB;IAQ/D,WAAW,IAAI,QAAQ,GAAG,MAAM;IA2BhC,IAAI,OAAO,qBAEV;IAuED,cAAc,CACZ,QAAQ,CAAC,EAAE,OAAO,GACjB,CAAC,EAAE,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,OAAO,EAAE,KAAK,EAAE,OAAO,CAAC;CAiMjE"}
|
||||
592
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/ast.js
generated
vendored
Normal file
592
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/ast.js
generated
vendored
Normal file
@@ -0,0 +1,592 @@
|
||||
"use strict";
|
||||
// parse a single path portion
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.AST = void 0;
|
||||
const brace_expressions_js_1 = require("./brace-expressions.js");
|
||||
const unescape_js_1 = require("./unescape.js");
|
||||
const types = new Set(['!', '?', '+', '*', '@']);
|
||||
const isExtglobType = (c) => types.has(c);
|
||||
// Patterns that get prepended to bind to the start of either the
|
||||
// entire string, or just a single path portion, to prevent dots
|
||||
// and/or traversal patterns, when needed.
|
||||
// Exts don't need the ^ or / bit, because the root binds that already.
|
||||
const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
|
||||
const startNoDot = '(?!\\.)';
|
||||
// characters that indicate a start of pattern needs the "no dots" bit,
|
||||
// because a dot *might* be matched. ( is not in the list, because in
|
||||
// the case of a child extglob, it will handle the prevention itself.
|
||||
const addPatternStart = new Set(['[', '.']);
|
||||
// cases where traversal is A-OK, no dot prevention needed
|
||||
const justDots = new Set(['..', '.']);
|
||||
const reSpecials = new Set('().*{}+?[]^$\\!');
|
||||
const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
|
||||
// any single thing other than /
|
||||
const qmark = '[^/]';
|
||||
// * => any number of characters
|
||||
const star = qmark + '*?';
|
||||
// use + when we need to ensure that *something* matches, because the * is
|
||||
// the only thing in the path portion.
|
||||
const starNoEmpty = qmark + '+?';
|
||||
// remove the \ chars that we added if we end up doing a nonmagic compare
|
||||
// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
|
||||
class AST {
|
||||
type;
|
||||
#root;
|
||||
#hasMagic;
|
||||
#uflag = false;
|
||||
#parts = [];
|
||||
#parent;
|
||||
#parentIndex;
|
||||
#negs;
|
||||
#filledNegs = false;
|
||||
#options;
|
||||
#toString;
|
||||
// set to true if it's an extglob with no children
|
||||
// (which really means one child of '')
|
||||
#emptyExt = false;
|
||||
constructor(type, parent, options = {}) {
|
||||
this.type = type;
|
||||
// extglobs are inherently magical
|
||||
if (type)
|
||||
this.#hasMagic = true;
|
||||
this.#parent = parent;
|
||||
this.#root = this.#parent ? this.#parent.#root : this;
|
||||
this.#options = this.#root === this ? options : this.#root.#options;
|
||||
this.#negs = this.#root === this ? [] : this.#root.#negs;
|
||||
if (type === '!' && !this.#root.#filledNegs)
|
||||
this.#negs.push(this);
|
||||
this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
|
||||
}
|
||||
get hasMagic() {
|
||||
/* c8 ignore start */
|
||||
if (this.#hasMagic !== undefined)
|
||||
return this.#hasMagic;
|
||||
/* c8 ignore stop */
|
||||
for (const p of this.#parts) {
|
||||
if (typeof p === 'string')
|
||||
continue;
|
||||
if (p.type || p.hasMagic)
|
||||
return (this.#hasMagic = true);
|
||||
}
|
||||
// note: will be undefined until we generate the regexp src and find out
|
||||
return this.#hasMagic;
|
||||
}
|
||||
// reconstructs the pattern
|
||||
toString() {
|
||||
if (this.#toString !== undefined)
|
||||
return this.#toString;
|
||||
if (!this.type) {
|
||||
return (this.#toString = this.#parts.map(p => String(p)).join(''));
|
||||
}
|
||||
else {
|
||||
return (this.#toString =
|
||||
this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
|
||||
}
|
||||
}
|
||||
#fillNegs() {
|
||||
/* c8 ignore start */
|
||||
if (this !== this.#root)
|
||||
throw new Error('should only call on root');
|
||||
if (this.#filledNegs)
|
||||
return this;
|
||||
/* c8 ignore stop */
|
||||
// call toString() once to fill this out
|
||||
this.toString();
|
||||
this.#filledNegs = true;
|
||||
let n;
|
||||
while ((n = this.#negs.pop())) {
|
||||
if (n.type !== '!')
|
||||
continue;
|
||||
// walk up the tree, appending everthing that comes AFTER parentIndex
|
||||
let p = n;
|
||||
let pp = p.#parent;
|
||||
while (pp) {
|
||||
for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
|
||||
for (const part of n.#parts) {
|
||||
/* c8 ignore start */
|
||||
if (typeof part === 'string') {
|
||||
throw new Error('string part in extglob AST??');
|
||||
}
|
||||
/* c8 ignore stop */
|
||||
part.copyIn(pp.#parts[i]);
|
||||
}
|
||||
}
|
||||
p = pp;
|
||||
pp = p.#parent;
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
push(...parts) {
|
||||
for (const p of parts) {
|
||||
if (p === '')
|
||||
continue;
|
||||
/* c8 ignore start */
|
||||
if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
|
||||
throw new Error('invalid part: ' + p);
|
||||
}
|
||||
/* c8 ignore stop */
|
||||
this.#parts.push(p);
|
||||
}
|
||||
}
|
||||
toJSON() {
|
||||
const ret = this.type === null
|
||||
? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
|
||||
: [this.type, ...this.#parts.map(p => p.toJSON())];
|
||||
if (this.isStart() && !this.type)
|
||||
ret.unshift([]);
|
||||
if (this.isEnd() &&
|
||||
(this === this.#root ||
|
||||
(this.#root.#filledNegs && this.#parent?.type === '!'))) {
|
||||
ret.push({});
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
isStart() {
|
||||
if (this.#root === this)
|
||||
return true;
|
||||
// if (this.type) return !!this.#parent?.isStart()
|
||||
if (!this.#parent?.isStart())
|
||||
return false;
|
||||
if (this.#parentIndex === 0)
|
||||
return true;
|
||||
// if everything AHEAD of this is a negation, then it's still the "start"
|
||||
const p = this.#parent;
|
||||
for (let i = 0; i < this.#parentIndex; i++) {
|
||||
const pp = p.#parts[i];
|
||||
if (!(pp instanceof AST && pp.type === '!')) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
isEnd() {
|
||||
if (this.#root === this)
|
||||
return true;
|
||||
if (this.#parent?.type === '!')
|
||||
return true;
|
||||
if (!this.#parent?.isEnd())
|
||||
return false;
|
||||
if (!this.type)
|
||||
return this.#parent?.isEnd();
|
||||
// if not root, it'll always have a parent
|
||||
/* c8 ignore start */
|
||||
const pl = this.#parent ? this.#parent.#parts.length : 0;
|
||||
/* c8 ignore stop */
|
||||
return this.#parentIndex === pl - 1;
|
||||
}
|
||||
copyIn(part) {
|
||||
if (typeof part === 'string')
|
||||
this.push(part);
|
||||
else
|
||||
this.push(part.clone(this));
|
||||
}
|
||||
clone(parent) {
|
||||
const c = new AST(this.type, parent);
|
||||
for (const p of this.#parts) {
|
||||
c.copyIn(p);
|
||||
}
|
||||
return c;
|
||||
}
|
||||
static #parseAST(str, ast, pos, opt) {
|
||||
let escaping = false;
|
||||
let inBrace = false;
|
||||
let braceStart = -1;
|
||||
let braceNeg = false;
|
||||
if (ast.type === null) {
|
||||
// outside of a extglob, append until we find a start
|
||||
let i = pos;
|
||||
let acc = '';
|
||||
while (i < str.length) {
|
||||
const c = str.charAt(i++);
|
||||
// still accumulate escapes at this point, but we do ignore
|
||||
// starts that are escaped
|
||||
if (escaping || c === '\\') {
|
||||
escaping = !escaping;
|
||||
acc += c;
|
||||
continue;
|
||||
}
|
||||
if (inBrace) {
|
||||
if (i === braceStart + 1) {
|
||||
if (c === '^' || c === '!') {
|
||||
braceNeg = true;
|
||||
}
|
||||
}
|
||||
else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
|
||||
inBrace = false;
|
||||
}
|
||||
acc += c;
|
||||
continue;
|
||||
}
|
||||
else if (c === '[') {
|
||||
inBrace = true;
|
||||
braceStart = i;
|
||||
braceNeg = false;
|
||||
acc += c;
|
||||
continue;
|
||||
}
|
||||
if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
|
||||
ast.push(acc);
|
||||
acc = '';
|
||||
const ext = new AST(c, ast);
|
||||
i = AST.#parseAST(str, ext, i, opt);
|
||||
ast.push(ext);
|
||||
continue;
|
||||
}
|
||||
acc += c;
|
||||
}
|
||||
ast.push(acc);
|
||||
return i;
|
||||
}
|
||||
// some kind of extglob, pos is at the (
|
||||
// find the next | or )
|
||||
let i = pos + 1;
|
||||
let part = new AST(null, ast);
|
||||
const parts = [];
|
||||
let acc = '';
|
||||
while (i < str.length) {
|
||||
const c = str.charAt(i++);
|
||||
// still accumulate escapes at this point, but we do ignore
|
||||
// starts that are escaped
|
||||
if (escaping || c === '\\') {
|
||||
escaping = !escaping;
|
||||
acc += c;
|
||||
continue;
|
||||
}
|
||||
if (inBrace) {
|
||||
if (i === braceStart + 1) {
|
||||
if (c === '^' || c === '!') {
|
||||
braceNeg = true;
|
||||
}
|
||||
}
|
||||
else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
|
||||
inBrace = false;
|
||||
}
|
||||
acc += c;
|
||||
continue;
|
||||
}
|
||||
else if (c === '[') {
|
||||
inBrace = true;
|
||||
braceStart = i;
|
||||
braceNeg = false;
|
||||
acc += c;
|
||||
continue;
|
||||
}
|
||||
if (isExtglobType(c) && str.charAt(i) === '(') {
|
||||
part.push(acc);
|
||||
acc = '';
|
||||
const ext = new AST(c, part);
|
||||
part.push(ext);
|
||||
i = AST.#parseAST(str, ext, i, opt);
|
||||
continue;
|
||||
}
|
||||
if (c === '|') {
|
||||
part.push(acc);
|
||||
acc = '';
|
||||
parts.push(part);
|
||||
part = new AST(null, ast);
|
||||
continue;
|
||||
}
|
||||
if (c === ')') {
|
||||
if (acc === '' && ast.#parts.length === 0) {
|
||||
ast.#emptyExt = true;
|
||||
}
|
||||
part.push(acc);
|
||||
acc = '';
|
||||
ast.push(...parts, part);
|
||||
return i;
|
||||
}
|
||||
acc += c;
|
||||
}
|
||||
// unfinished extglob
|
||||
// if we got here, it was a malformed extglob! not an extglob, but
|
||||
// maybe something else in there.
|
||||
ast.type = null;
|
||||
ast.#hasMagic = undefined;
|
||||
ast.#parts = [str.substring(pos - 1)];
|
||||
return i;
|
||||
}
|
||||
static fromGlob(pattern, options = {}) {
|
||||
const ast = new AST(null, undefined, options);
|
||||
AST.#parseAST(pattern, ast, 0, options);
|
||||
return ast;
|
||||
}
|
||||
// returns the regular expression if there's magic, or the unescaped
|
||||
// string if not.
|
||||
toMMPattern() {
|
||||
// should only be called on root
|
||||
/* c8 ignore start */
|
||||
if (this !== this.#root)
|
||||
return this.#root.toMMPattern();
|
||||
/* c8 ignore stop */
|
||||
const glob = this.toString();
|
||||
const [re, body, hasMagic, uflag] = this.toRegExpSource();
|
||||
// if we're in nocase mode, and not nocaseMagicOnly, then we do
|
||||
// still need a regular expression if we have to case-insensitively
|
||||
// match capital/lowercase characters.
|
||||
const anyMagic = hasMagic ||
|
||||
this.#hasMagic ||
|
||||
(this.#options.nocase &&
|
||||
!this.#options.nocaseMagicOnly &&
|
||||
glob.toUpperCase() !== glob.toLowerCase());
|
||||
if (!anyMagic) {
|
||||
return body;
|
||||
}
|
||||
const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
|
||||
return Object.assign(new RegExp(`^${re}$`, flags), {
|
||||
_src: re,
|
||||
_glob: glob,
|
||||
});
|
||||
}
|
||||
get options() {
|
||||
return this.#options;
|
||||
}
|
||||
// returns the string match, the regexp source, whether there's magic
|
||||
// in the regexp (so a regular expression is required) and whether or
|
||||
// not the uflag is needed for the regular expression (for posix classes)
|
||||
// TODO: instead of injecting the start/end at this point, just return
|
||||
// the BODY of the regexp, along with the start/end portions suitable
|
||||
// for binding the start/end in either a joined full-path makeRe context
|
||||
// (where we bind to (^|/), or a standalone matchPart context (where
|
||||
// we bind to ^, and not /). Otherwise slashes get duped!
|
||||
//
|
||||
// In part-matching mode, the start is:
|
||||
// - if not isStart: nothing
|
||||
// - if traversal possible, but not allowed: ^(?!\.\.?$)
|
||||
// - if dots allowed or not possible: ^
|
||||
// - if dots possible and not allowed: ^(?!\.)
|
||||
// end is:
|
||||
// - if not isEnd(): nothing
|
||||
// - else: $
|
||||
//
|
||||
// In full-path matching mode, we put the slash at the START of the
|
||||
// pattern, so start is:
|
||||
// - if first pattern: same as part-matching mode
|
||||
// - if not isStart(): nothing
|
||||
// - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
|
||||
// - if dots allowed or not possible: /
|
||||
// - if dots possible and not allowed: /(?!\.)
|
||||
// end is:
|
||||
// - if last pattern, same as part-matching mode
|
||||
// - else nothing
|
||||
//
|
||||
// Always put the (?:$|/) on negated tails, though, because that has to be
|
||||
// there to bind the end of the negated pattern portion, and it's easier to
|
||||
// just stick it in now rather than try to inject it later in the middle of
|
||||
// the pattern.
|
||||
//
|
||||
// We can just always return the same end, and leave it up to the caller
|
||||
// to know whether it's going to be used joined or in parts.
|
||||
// And, if the start is adjusted slightly, can do the same there:
|
||||
// - if not isStart: nothing
|
||||
// - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
|
||||
// - if dots allowed or not possible: (?:/|^)
|
||||
// - if dots possible and not allowed: (?:/|^)(?!\.)
|
||||
//
|
||||
// But it's better to have a simpler binding without a conditional, for
|
||||
// performance, so probably better to return both start options.
|
||||
//
|
||||
// Then the caller just ignores the end if it's not the first pattern,
|
||||
// and the start always gets applied.
|
||||
//
|
||||
// But that's always going to be $ if it's the ending pattern, or nothing,
|
||||
// so the caller can just attach $ at the end of the pattern when building.
|
||||
//
|
||||
// So the todo is:
|
||||
// - better detect what kind of start is needed
|
||||
// - return both flavors of starting pattern
|
||||
// - attach $ at the end of the pattern when creating the actual RegExp
|
||||
//
|
||||
// Ah, but wait, no, that all only applies to the root when the first pattern
|
||||
// is not an extglob. If the first pattern IS an extglob, then we need all
|
||||
// that dot prevention biz to live in the extglob portions, because eg
|
||||
// +(*|.x*) can match .xy but not .yx.
|
||||
//
|
||||
// So, return the two flavors if it's #root and the first child is not an
|
||||
// AST, otherwise leave it to the child AST to handle it, and there,
|
||||
// use the (?:^|/) style of start binding.
|
||||
//
|
||||
// Even simplified further:
|
||||
// - Since the start for a join is eg /(?!\.) and the start for a part
|
||||
// is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
|
||||
// or start or whatever) and prepend ^ or / at the Regexp construction.
|
||||
toRegExpSource(allowDot) {
|
||||
const dot = allowDot ?? !!this.#options.dot;
|
||||
if (this.#root === this)
|
||||
this.#fillNegs();
|
||||
if (!this.type) {
|
||||
const noEmpty = this.isStart() && this.isEnd();
|
||||
const src = this.#parts
|
||||
.map(p => {
|
||||
const [re, _, hasMagic, uflag] = typeof p === 'string'
|
||||
? AST.#parseGlob(p, this.#hasMagic, noEmpty)
|
||||
: p.toRegExpSource(allowDot);
|
||||
this.#hasMagic = this.#hasMagic || hasMagic;
|
||||
this.#uflag = this.#uflag || uflag;
|
||||
return re;
|
||||
})
|
||||
.join('');
|
||||
let start = '';
|
||||
if (this.isStart()) {
|
||||
if (typeof this.#parts[0] === 'string') {
|
||||
// this is the string that will match the start of the pattern,
|
||||
// so we need to protect against dots and such.
|
||||
// '.' and '..' cannot match unless the pattern is that exactly,
|
||||
// even if it starts with . or dot:true is set.
|
||||
const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
|
||||
if (!dotTravAllowed) {
|
||||
const aps = addPatternStart;
|
||||
// check if we have a possibility of matching . or ..,
|
||||
// and prevent that.
|
||||
const needNoTrav =
|
||||
// dots are allowed, and the pattern starts with [ or .
|
||||
(dot && aps.has(src.charAt(0))) ||
|
||||
// the pattern starts with \., and then [ or .
|
||||
(src.startsWith('\\.') && aps.has(src.charAt(2))) ||
|
||||
// the pattern starts with \.\., and then [ or .
|
||||
(src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
|
||||
// no need to prevent dots if it can't match a dot, or if a
|
||||
// sub-pattern will be preventing it anyway.
|
||||
const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
|
||||
start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
|
||||
}
|
||||
}
|
||||
}
|
||||
// append the "end of path portion" pattern to negation tails
|
||||
let end = '';
|
||||
if (this.isEnd() &&
|
||||
this.#root.#filledNegs &&
|
||||
this.#parent?.type === '!') {
|
||||
end = '(?:$|\\/)';
|
||||
}
|
||||
const final = start + src + end;
|
||||
return [
|
||||
final,
|
||||
(0, unescape_js_1.unescape)(src),
|
||||
(this.#hasMagic = !!this.#hasMagic),
|
||||
this.#uflag,
|
||||
];
|
||||
}
|
||||
// We need to calculate the body *twice* if it's a repeat pattern
|
||||
// at the start, once in nodot mode, then again in dot mode, so a
|
||||
// pattern like *(?) can match 'x.y'
|
||||
const repeated = this.type === '*' || this.type === '+';
|
||||
// some kind of extglob
|
||||
const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
|
||||
let body = this.#partsToRegExp(dot);
|
||||
if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
|
||||
// invalid extglob, has to at least be *something* present, if it's
|
||||
// the entire path portion.
|
||||
const s = this.toString();
|
||||
this.#parts = [s];
|
||||
this.type = null;
|
||||
this.#hasMagic = undefined;
|
||||
return [s, (0, unescape_js_1.unescape)(this.toString()), false, false];
|
||||
}
|
||||
// XXX abstract out this map method
|
||||
let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
|
||||
? ''
|
||||
: this.#partsToRegExp(true);
|
||||
if (bodyDotAllowed === body) {
|
||||
bodyDotAllowed = '';
|
||||
}
|
||||
if (bodyDotAllowed) {
|
||||
body = `(?:${body})(?:${bodyDotAllowed})*?`;
|
||||
}
|
||||
// an empty !() is exactly equivalent to a starNoEmpty
|
||||
let final = '';
|
||||
if (this.type === '!' && this.#emptyExt) {
|
||||
final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
|
||||
}
|
||||
else {
|
||||
const close = this.type === '!'
|
||||
? // !() must match something,but !(x) can match ''
|
||||
'))' +
|
||||
(this.isStart() && !dot && !allowDot ? startNoDot : '') +
|
||||
star +
|
||||
')'
|
||||
: this.type === '@'
|
||||
? ')'
|
||||
: this.type === '?'
|
||||
? ')?'
|
||||
: this.type === '+' && bodyDotAllowed
|
||||
? ')'
|
||||
: this.type === '*' && bodyDotAllowed
|
||||
? `)?`
|
||||
: `)${this.type}`;
|
||||
final = start + body + close;
|
||||
}
|
||||
return [
|
||||
final,
|
||||
(0, unescape_js_1.unescape)(body),
|
||||
(this.#hasMagic = !!this.#hasMagic),
|
||||
this.#uflag,
|
||||
];
|
||||
}
|
||||
#partsToRegExp(dot) {
|
||||
return this.#parts
|
||||
.map(p => {
|
||||
// extglob ASTs should only contain parent ASTs
|
||||
/* c8 ignore start */
|
||||
if (typeof p === 'string') {
|
||||
throw new Error('string type in extglob ast??');
|
||||
}
|
||||
/* c8 ignore stop */
|
||||
// can ignore hasMagic, because extglobs are already always magic
|
||||
const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
|
||||
this.#uflag = this.#uflag || uflag;
|
||||
return re;
|
||||
})
|
||||
.filter(p => !(this.isStart() && this.isEnd()) || !!p)
|
||||
.join('|');
|
||||
}
|
||||
static #parseGlob(glob, hasMagic, noEmpty = false) {
|
||||
let escaping = false;
|
||||
let re = '';
|
||||
let uflag = false;
|
||||
for (let i = 0; i < glob.length; i++) {
|
||||
const c = glob.charAt(i);
|
||||
if (escaping) {
|
||||
escaping = false;
|
||||
re += (reSpecials.has(c) ? '\\' : '') + c;
|
||||
continue;
|
||||
}
|
||||
if (c === '\\') {
|
||||
if (i === glob.length - 1) {
|
||||
re += '\\\\';
|
||||
}
|
||||
else {
|
||||
escaping = true;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
if (c === '[') {
|
||||
const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(glob, i);
|
||||
if (consumed) {
|
||||
re += src;
|
||||
uflag = uflag || needUflag;
|
||||
i += consumed - 1;
|
||||
hasMagic = hasMagic || magic;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
if (c === '*') {
|
||||
if (noEmpty && glob === '*')
|
||||
re += starNoEmpty;
|
||||
else
|
||||
re += star;
|
||||
hasMagic = true;
|
||||
continue;
|
||||
}
|
||||
if (c === '?') {
|
||||
re += qmark;
|
||||
hasMagic = true;
|
||||
continue;
|
||||
}
|
||||
re += regExpEscape(c);
|
||||
}
|
||||
return [re, (0, unescape_js_1.unescape)(glob), !!hasMagic, uflag];
|
||||
}
|
||||
}
|
||||
exports.AST = AST;
|
||||
//# sourceMappingURL=ast.js.map
|
||||
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/ast.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/ast.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
8
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/brace-expressions.d.ts
generated
vendored
Normal file
8
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/brace-expressions.d.ts
generated
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
export type ParseClassResult = [
|
||||
src: string,
|
||||
uFlag: boolean,
|
||||
consumed: number,
|
||||
hasMagic: boolean
|
||||
];
|
||||
export declare const parseClass: (glob: string, position: number) => ParseClassResult;
|
||||
//# sourceMappingURL=brace-expressions.d.ts.map
|
||||
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/brace-expressions.d.ts.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/brace-expressions.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"brace-expressions.d.ts","sourceRoot":"","sources":["../../src/brace-expressions.ts"],"names":[],"mappings":"AA+BA,MAAM,MAAM,gBAAgB,GAAG;IAC7B,GAAG,EAAE,MAAM;IACX,KAAK,EAAE,OAAO;IACd,QAAQ,EAAE,MAAM;IAChB,QAAQ,EAAE,OAAO;CAClB,CAAA;AAQD,eAAO,MAAM,UAAU,SACf,MAAM,YACF,MAAM,qBA8HjB,CAAA"}
|
||||
152
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/brace-expressions.js
generated
vendored
Normal file
152
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/brace-expressions.js
generated
vendored
Normal file
@@ -0,0 +1,152 @@
|
||||
"use strict";
|
||||
// translate the various posix character classes into unicode properties
|
||||
// this works across all unicode locales
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.parseClass = void 0;
|
||||
// { <posix class>: [<translation>, /u flag required, negated]
|
||||
const posixClasses = {
|
||||
'[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
|
||||
'[:alpha:]': ['\\p{L}\\p{Nl}', true],
|
||||
'[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
|
||||
'[:blank:]': ['\\p{Zs}\\t', true],
|
||||
'[:cntrl:]': ['\\p{Cc}', true],
|
||||
'[:digit:]': ['\\p{Nd}', true],
|
||||
'[:graph:]': ['\\p{Z}\\p{C}', true, true],
|
||||
'[:lower:]': ['\\p{Ll}', true],
|
||||
'[:print:]': ['\\p{C}', true],
|
||||
'[:punct:]': ['\\p{P}', true],
|
||||
'[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
|
||||
'[:upper:]': ['\\p{Lu}', true],
|
||||
'[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
|
||||
'[:xdigit:]': ['A-Fa-f0-9', false],
|
||||
};
|
||||
// only need to escape a few things inside of brace expressions
|
||||
// escapes: [ \ ] -
|
||||
const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
|
||||
// escape all regexp magic characters
|
||||
const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
|
||||
// everything has already been escaped, we just have to join
|
||||
const rangesToString = (ranges) => ranges.join('');
|
||||
// takes a glob string at a posix brace expression, and returns
|
||||
// an equivalent regular expression source, and boolean indicating
|
||||
// whether the /u flag needs to be applied, and the number of chars
|
||||
// consumed to parse the character class.
|
||||
// This also removes out of order ranges, and returns ($.) if the
|
||||
// entire class just no good.
|
||||
const parseClass = (glob, position) => {
|
||||
const pos = position;
|
||||
/* c8 ignore start */
|
||||
if (glob.charAt(pos) !== '[') {
|
||||
throw new Error('not in a brace expression');
|
||||
}
|
||||
/* c8 ignore stop */
|
||||
const ranges = [];
|
||||
const negs = [];
|
||||
let i = pos + 1;
|
||||
let sawStart = false;
|
||||
let uflag = false;
|
||||
let escaping = false;
|
||||
let negate = false;
|
||||
let endPos = pos;
|
||||
let rangeStart = '';
|
||||
WHILE: while (i < glob.length) {
|
||||
const c = glob.charAt(i);
|
||||
if ((c === '!' || c === '^') && i === pos + 1) {
|
||||
negate = true;
|
||||
i++;
|
||||
continue;
|
||||
}
|
||||
if (c === ']' && sawStart && !escaping) {
|
||||
endPos = i + 1;
|
||||
break;
|
||||
}
|
||||
sawStart = true;
|
||||
if (c === '\\') {
|
||||
if (!escaping) {
|
||||
escaping = true;
|
||||
i++;
|
||||
continue;
|
||||
}
|
||||
// escaped \ char, fall through and treat like normal char
|
||||
}
|
||||
if (c === '[' && !escaping) {
|
||||
// either a posix class, a collation equivalent, or just a [
|
||||
for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
|
||||
if (glob.startsWith(cls, i)) {
|
||||
// invalid, [a-[] is fine, but not [a-[:alpha]]
|
||||
if (rangeStart) {
|
||||
return ['$.', false, glob.length - pos, true];
|
||||
}
|
||||
i += cls.length;
|
||||
if (neg)
|
||||
negs.push(unip);
|
||||
else
|
||||
ranges.push(unip);
|
||||
uflag = uflag || u;
|
||||
continue WHILE;
|
||||
}
|
||||
}
|
||||
}
|
||||
// now it's just a normal character, effectively
|
||||
escaping = false;
|
||||
if (rangeStart) {
|
||||
// throw this range away if it's not valid, but others
|
||||
// can still match.
|
||||
if (c > rangeStart) {
|
||||
ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
|
||||
}
|
||||
else if (c === rangeStart) {
|
||||
ranges.push(braceEscape(c));
|
||||
}
|
||||
rangeStart = '';
|
||||
i++;
|
||||
continue;
|
||||
}
|
||||
// now might be the start of a range.
|
||||
// can be either c-d or c-] or c<more...>] or c] at this point
|
||||
if (glob.startsWith('-]', i + 1)) {
|
||||
ranges.push(braceEscape(c + '-'));
|
||||
i += 2;
|
||||
continue;
|
||||
}
|
||||
if (glob.startsWith('-', i + 1)) {
|
||||
rangeStart = c;
|
||||
i += 2;
|
||||
continue;
|
||||
}
|
||||
// not the start of a range, just a single character
|
||||
ranges.push(braceEscape(c));
|
||||
i++;
|
||||
}
|
||||
if (endPos < i) {
|
||||
// didn't see the end of the class, not a valid class,
|
||||
// but might still be valid as a literal match.
|
||||
return ['', false, 0, false];
|
||||
}
|
||||
// if we got no ranges and no negates, then we have a range that
|
||||
// cannot possibly match anything, and that poisons the whole glob
|
||||
if (!ranges.length && !negs.length) {
|
||||
return ['$.', false, glob.length - pos, true];
|
||||
}
|
||||
// if we got one positive range, and it's a single character, then that's
|
||||
// not actually a magic pattern, it's just that one literal character.
|
||||
// we should not treat that as "magic", we should just return the literal
|
||||
// character. [_] is a perfectly valid way to escape glob magic chars.
|
||||
if (negs.length === 0 &&
|
||||
ranges.length === 1 &&
|
||||
/^\\?.$/.test(ranges[0]) &&
|
||||
!negate) {
|
||||
const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
|
||||
return [regexpEscape(r), false, endPos - pos, false];
|
||||
}
|
||||
const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
|
||||
const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
|
||||
const comb = ranges.length && negs.length
|
||||
? '(' + sranges + '|' + snegs + ')'
|
||||
: ranges.length
|
||||
? sranges
|
||||
: snegs;
|
||||
return [comb, uflag, endPos - pos, true];
|
||||
};
|
||||
exports.parseClass = parseClass;
|
||||
//# sourceMappingURL=brace-expressions.js.map
|
||||
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/brace-expressions.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/brace-expressions.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
12
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/escape.d.ts
generated
vendored
Normal file
12
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/escape.d.ts
generated
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
import { MinimatchOptions } from './index.js';
|
||||
/**
|
||||
* Escape all magic characters in a glob pattern.
|
||||
*
|
||||
* If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
|
||||
* option is used, then characters are escaped by wrapping in `[]`, because
|
||||
* a magic character wrapped in a character class can only be satisfied by
|
||||
* that exact character. In this mode, `\` is _not_ escaped, because it is
|
||||
* not interpreted as a magic character, but instead as a path separator.
|
||||
*/
|
||||
export declare const escape: (s: string, { windowsPathsNoEscape, }?: Pick<MinimatchOptions, 'windowsPathsNoEscape'>) => string;
|
||||
//# sourceMappingURL=escape.d.ts.map
|
||||
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/escape.d.ts.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/escape.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"escape.d.ts","sourceRoot":"","sources":["../../src/escape.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,gBAAgB,EAAE,MAAM,YAAY,CAAA;AAC7C;;;;;;;;GAQG;AACH,eAAO,MAAM,MAAM,MACd,MAAM,8BAGN,KAAK,gBAAgB,EAAE,sBAAsB,CAAC,WAQlD,CAAA"}
|
||||
22
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/escape.js
generated
vendored
Normal file
22
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/escape.js
generated
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.escape = void 0;
|
||||
/**
|
||||
* Escape all magic characters in a glob pattern.
|
||||
*
|
||||
* If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
|
||||
* option is used, then characters are escaped by wrapping in `[]`, because
|
||||
* a magic character wrapped in a character class can only be satisfied by
|
||||
* that exact character. In this mode, `\` is _not_ escaped, because it is
|
||||
* not interpreted as a magic character, but instead as a path separator.
|
||||
*/
|
||||
const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
|
||||
// don't need to escape +@! because we escape the parens
|
||||
// that make those magic, and escaping ! as [!] isn't valid,
|
||||
// because [!]] is a valid glob class meaning not ']'.
|
||||
return windowsPathsNoEscape
|
||||
? s.replace(/[?*()[\]]/g, '[$&]')
|
||||
: s.replace(/[?*()[\]\\]/g, '\\$&');
|
||||
};
|
||||
exports.escape = escape;
|
||||
//# sourceMappingURL=escape.js.map
|
||||
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/escape.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/escape.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"escape.js","sourceRoot":"","sources":["../../src/escape.ts"],"names":[],"mappings":";;;AACA;;;;;;;;GAQG;AACI,MAAM,MAAM,GAAG,CACpB,CAAS,EACT,EACE,oBAAoB,GAAG,KAAK,MACsB,EAAE,EACtD,EAAE;IACF,wDAAwD;IACxD,4DAA4D;IAC5D,sDAAsD;IACtD,OAAO,oBAAoB;QACzB,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,YAAY,EAAE,MAAM,CAAC;QACjC,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,cAAc,EAAE,MAAM,CAAC,CAAA;AACvC,CAAC,CAAA;AAZY,QAAA,MAAM,UAYlB","sourcesContent":["import { MinimatchOptions } from './index.js'\n/**\n * Escape all magic characters in a glob pattern.\n *\n * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}\n * option is used, then characters are escaped by wrapping in `[]`, because\n * a magic character wrapped in a character class can only be satisfied by\n * that exact character. In this mode, `\\` is _not_ escaped, because it is\n * not interpreted as a magic character, but instead as a path separator.\n */\nexport const escape = (\n s: string,\n {\n windowsPathsNoEscape = false,\n }: Pick<MinimatchOptions, 'windowsPathsNoEscape'> = {}\n) => {\n // don't need to escape +@! because we escape the parens\n // that make those magic, and escaping ! as [!] isn't valid,\n // because [!]] is a valid glob class meaning not ']'.\n return windowsPathsNoEscape\n ? s.replace(/[?*()[\\]]/g, '[$&]')\n : s.replace(/[?*()[\\]\\\\]/g, '\\\\$&')\n}\n"]}
|
||||
94
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/index.d.ts
generated
vendored
Normal file
94
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/index.d.ts
generated
vendored
Normal file
@@ -0,0 +1,94 @@
|
||||
import { AST } from './ast.js';
|
||||
type Platform = 'aix' | 'android' | 'darwin' | 'freebsd' | 'haiku' | 'linux' | 'openbsd' | 'sunos' | 'win32' | 'cygwin' | 'netbsd';
|
||||
export interface MinimatchOptions {
|
||||
nobrace?: boolean;
|
||||
nocomment?: boolean;
|
||||
nonegate?: boolean;
|
||||
debug?: boolean;
|
||||
noglobstar?: boolean;
|
||||
noext?: boolean;
|
||||
nonull?: boolean;
|
||||
windowsPathsNoEscape?: boolean;
|
||||
allowWindowsEscape?: boolean;
|
||||
partial?: boolean;
|
||||
dot?: boolean;
|
||||
nocase?: boolean;
|
||||
nocaseMagicOnly?: boolean;
|
||||
magicalBraces?: boolean;
|
||||
matchBase?: boolean;
|
||||
flipNegate?: boolean;
|
||||
preserveMultipleSlashes?: boolean;
|
||||
optimizationLevel?: number;
|
||||
platform?: Platform;
|
||||
windowsNoMagicRoot?: boolean;
|
||||
}
|
||||
export declare const minimatch: {
|
||||
(p: string, pattern: string, options?: MinimatchOptions): boolean;
|
||||
sep: Sep;
|
||||
GLOBSTAR: typeof GLOBSTAR;
|
||||
filter: (pattern: string, options?: MinimatchOptions) => (p: string) => boolean;
|
||||
defaults: (def: MinimatchOptions) => typeof minimatch;
|
||||
braceExpand: (pattern: string, options?: MinimatchOptions) => string[];
|
||||
makeRe: (pattern: string, options?: MinimatchOptions) => false | MMRegExp;
|
||||
match: (list: string[], pattern: string, options?: MinimatchOptions) => string[];
|
||||
AST: typeof AST;
|
||||
Minimatch: typeof Minimatch;
|
||||
escape: (s: string, { windowsPathsNoEscape, }?: Pick<MinimatchOptions, "windowsPathsNoEscape">) => string;
|
||||
unescape: (s: string, { windowsPathsNoEscape, }?: Pick<MinimatchOptions, "windowsPathsNoEscape">) => string;
|
||||
};
|
||||
type Sep = '\\' | '/';
|
||||
export declare const sep: Sep;
|
||||
export declare const GLOBSTAR: unique symbol;
|
||||
export declare const filter: (pattern: string, options?: MinimatchOptions) => (p: string) => boolean;
|
||||
export declare const defaults: (def: MinimatchOptions) => typeof minimatch;
|
||||
export declare const braceExpand: (pattern: string, options?: MinimatchOptions) => string[];
|
||||
export declare const makeRe: (pattern: string, options?: MinimatchOptions) => false | MMRegExp;
|
||||
export declare const match: (list: string[], pattern: string, options?: MinimatchOptions) => string[];
|
||||
export type MMRegExp = RegExp & {
|
||||
_src?: string;
|
||||
_glob?: string;
|
||||
};
|
||||
export type ParseReturnFiltered = string | MMRegExp | typeof GLOBSTAR;
|
||||
export type ParseReturn = ParseReturnFiltered | false;
|
||||
export declare class Minimatch {
|
||||
options: MinimatchOptions;
|
||||
set: ParseReturnFiltered[][];
|
||||
pattern: string;
|
||||
windowsPathsNoEscape: boolean;
|
||||
nonegate: boolean;
|
||||
negate: boolean;
|
||||
comment: boolean;
|
||||
empty: boolean;
|
||||
preserveMultipleSlashes: boolean;
|
||||
partial: boolean;
|
||||
globSet: string[];
|
||||
globParts: string[][];
|
||||
nocase: boolean;
|
||||
isWindows: boolean;
|
||||
platform: Platform;
|
||||
windowsNoMagicRoot: boolean;
|
||||
regexp: false | null | MMRegExp;
|
||||
constructor(pattern: string, options?: MinimatchOptions);
|
||||
hasMagic(): boolean;
|
||||
debug(..._: any[]): void;
|
||||
make(): void;
|
||||
preprocess(globParts: string[][]): string[][];
|
||||
adjascentGlobstarOptimize(globParts: string[][]): string[][];
|
||||
levelOneOptimize(globParts: string[][]): string[][];
|
||||
levelTwoFileOptimize(parts: string | string[]): string[];
|
||||
firstPhasePreProcess(globParts: string[][]): string[][];
|
||||
secondPhasePreProcess(globParts: string[][]): string[][];
|
||||
partsMatch(a: string[], b: string[], emptyGSMatch?: boolean): false | string[];
|
||||
parseNegate(): void;
|
||||
matchOne(file: string[], pattern: ParseReturn[], partial?: boolean): boolean;
|
||||
braceExpand(): string[];
|
||||
parse(pattern: string): ParseReturn;
|
||||
makeRe(): false | MMRegExp;
|
||||
slashSplit(p: string): string[];
|
||||
match(f: string, partial?: boolean): boolean;
|
||||
static defaults(def: MinimatchOptions): typeof Minimatch;
|
||||
}
|
||||
export { AST } from './ast.js';
|
||||
export { escape } from './escape.js';
|
||||
export { unescape } from './unescape.js';
|
||||
//# sourceMappingURL=index.d.ts.map
|
||||
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/index.d.ts.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/index.d.ts.map
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,GAAG,EAAe,MAAM,UAAU,CAAA;AAI3C,KAAK,QAAQ,GACT,KAAK,GACL,SAAS,GACT,QAAQ,GACR,SAAS,GACT,OAAO,GACP,OAAO,GACP,SAAS,GACT,OAAO,GACP,OAAO,GACP,QAAQ,GACR,QAAQ,CAAA;AAEZ,MAAM,WAAW,gBAAgB;IAC/B,OAAO,CAAC,EAAE,OAAO,CAAA;IACjB,SAAS,CAAC,EAAE,OAAO,CAAA;IACnB,QAAQ,CAAC,EAAE,OAAO,CAAA;IAClB,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,UAAU,CAAC,EAAE,OAAO,CAAA;IACpB,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAC9B,kBAAkB,CAAC,EAAE,OAAO,CAAA;IAC5B,OAAO,CAAC,EAAE,OAAO,CAAA;IACjB,GAAG,CAAC,EAAE,OAAO,CAAA;IACb,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,eAAe,CAAC,EAAE,OAAO,CAAA;IACzB,aAAa,CAAC,EAAE,OAAO,CAAA;IACvB,SAAS,CAAC,EAAE,OAAO,CAAA;IACnB,UAAU,CAAC,EAAE,OAAO,CAAA;IACpB,uBAAuB,CAAC,EAAE,OAAO,CAAA;IACjC,iBAAiB,CAAC,EAAE,MAAM,CAAA;IAC1B,QAAQ,CAAC,EAAE,QAAQ,CAAA;IACnB,kBAAkB,CAAC,EAAE,OAAO,CAAA;CAC7B;AAED,eAAO,MAAM,SAAS;QACjB,MAAM,WACA,MAAM,YACN,gBAAgB;;;sBAuGf,MAAM,YAAW,gBAAgB,SACvC,MAAM;oBAOkB,gBAAgB,KAAG,gBAAgB;2BA6EtD,MAAM,YACN,gBAAgB;sBA2BK,MAAM,YAAW,gBAAgB;kBAKzD,MAAM,EAAE,WACL,MAAM,YACN,gBAAgB;;;;;CArN1B,CAAA;AA+DD,KAAK,GAAG,GAAG,IAAI,GAAG,GAAG,CAAA;AAOrB,eAAO,MAAM,GAAG,KAAgE,CAAA;AAGhF,eAAO,MAAM,QAAQ,eAAwB,CAAA;AAmB7C,eAAO,MAAM,MAAM,YACP,MAAM,YAAW,gBAAgB,SACvC,MAAM,YACsB,CAAA;AAMlC,eAAO,MAAM,QAAQ,QAAS,gBAAgB,KAAG,gBA+DhD,CAAA;AAaD,eAAO,MAAM,WAAW,YACb,MAAM,YACN,gBAAgB,aAY1B,CAAA;AAeD,eAAO,MAAM,MAAM,YAAa,MAAM,YAAW,gBAAgB,qBACvB,CAAA;AAG1C,eAAO,MAAM,KAAK,SACV,MAAM,EAAE,WACL,MAAM,YACN,gBAAgB,aAQ1B,CAAA;AAQD,MAAM,MAAM,QAAQ,GAAG,MAAM,GAAG;IAC9B,IAAI,CAAC,EAAE,MAAM,CAAA;IACb,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,MAAM,MAAM,mBAAmB,GAAG,MAAM,GAAG,QAAQ,GAAG,OAAO,QAAQ,CAAA;AACrE,MAAM,MAAM,WAAW,GAAG,mBAAmB,GAAG,KAAK,CAAA;AAErD,qBAAa,SAAS;IACpB,OAAO,EAAE,gBAAgB,CAAA;IACzB,GAAG,EAAE,mBAAmB,EAAE,EAAE,CAAA;IAC5B,OAAO,EAAE,MAAM,CAAA;IAEf,oBAAoB,EAAE,OAAO,CAAA;IAC7B,QAAQ,EAAE,OAAO,CAAA;IACjB,MAAM,EAAE,OAAO,CAAA;IACf,OAAO,EAAE,OAAO,CAAA;IAChB,KAAK,EAAE,OAAO,C
AAA;IACd,uBAAuB,EAAE,OAAO,CAAA;IAChC,OAAO,EAAE,OAAO,CAAA;IAChB,OAAO,EAAE,MAAM,EAAE,CAAA;IACjB,SAAS,EAAE,MAAM,EAAE,EAAE,CAAA;IACrB,MAAM,EAAE,OAAO,CAAA;IAEf,SAAS,EAAE,OAAO,CAAA;IAClB,QAAQ,EAAE,QAAQ,CAAA;IAClB,kBAAkB,EAAE,OAAO,CAAA;IAE3B,MAAM,EAAE,KAAK,GAAG,IAAI,GAAG,QAAQ,CAAA;gBACnB,OAAO,EAAE,MAAM,EAAE,OAAO,GAAE,gBAAqB;IAkC3D,QAAQ,IAAI,OAAO;IAYnB,KAAK,CAAC,GAAG,CAAC,EAAE,GAAG,EAAE;IAEjB,IAAI;IA0FJ,UAAU,CAAC,SAAS,EAAE,MAAM,EAAE,EAAE;IA8BhC,yBAAyB,CAAC,SAAS,EAAE,MAAM,EAAE,EAAE;IAiB/C,gBAAgB,CAAC,SAAS,EAAE,MAAM,EAAE,EAAE;IAoBtC,oBAAoB,CAAC,KAAK,EAAE,MAAM,GAAG,MAAM,EAAE;IA6D7C,oBAAoB,CAAC,SAAS,EAAE,MAAM,EAAE,EAAE;IA0F1C,qBAAqB,CAAC,SAAS,EAAE,MAAM,EAAE,EAAE,GAAG,MAAM,EAAE,EAAE;IAkBxD,UAAU,CACR,CAAC,EAAE,MAAM,EAAE,EACX,CAAC,EAAE,MAAM,EAAE,EACX,YAAY,GAAE,OAAe,GAC5B,KAAK,GAAG,MAAM,EAAE;IA+CnB,WAAW;IAqBX,QAAQ,CAAC,IAAI,EAAE,MAAM,EAAE,EAAE,OAAO,EAAE,WAAW,EAAE,EAAE,OAAO,GAAE,OAAe;IAiNzE,WAAW;IAIX,KAAK,CAAC,OAAO,EAAE,MAAM,GAAG,WAAW;IAiDnC,MAAM;IAsFN,UAAU,CAAC,CAAC,EAAE,MAAM;IAepB,KAAK,CAAC,CAAC,EAAE,MAAM,EAAE,OAAO,UAAe;IAiEvC,MAAM,CAAC,QAAQ,CAAC,GAAG,EAAE,gBAAgB;CAGtC;AAED,OAAO,EAAE,GAAG,EAAE,MAAM,UAAU,CAAA;AAC9B,OAAO,EAAE,MAAM,EAAE,MAAM,aAAa,CAAA;AACpC,OAAO,EAAE,QAAQ,EAAE,MAAM,eAAe,CAAA"}
|
||||
1017
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/index.js
generated
vendored
Normal file
1017
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/index.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/index.js.map
generated
vendored
Normal file
1
desktop-operator/node_modules/@electron/universal/node_modules/minimatch/dist/commonjs/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user