main functions fixes
96  desktop-operator/node_modules/@electron/asar/lib/asar.d.ts  generated  vendored  Normal file
@@ -0,0 +1,96 @@
import { FilesystemDirectoryEntry, FilesystemEntry, FilesystemLinkEntry } from './filesystem';
import * as disk from './disk';
import { CrawledFileType } from './crawlfs';
import { IOptions } from './types/glob';
export declare function createPackage(src: string, dest: string): Promise<NodeJS.WritableStream>;
export type CreateOptions = {
    dot?: boolean;
    globOptions?: IOptions;
    /**
     * Path to a file listing filepaths, relative to `src`, in the specific order they should be inserted into the asar.
     * Each line may take any of these formats:
     * filepath
     * : filepath
     * <anything>:filepath
     */
    ordering?: string;
    pattern?: string;
    transform?: (filePath: string) => NodeJS.ReadWriteStream | void;
    unpack?: string;
    unpackDir?: string;
};
export declare function createPackageWithOptions(src: string, dest: string, options: CreateOptions): Promise<NodeJS.WritableStream>;
/**
 * Create an ASAR archive from a list of filenames.
 *
 * @param src - Base path. All files are relative to this.
 * @param dest - Archive filename (& path).
 * @param filenames - List of filenames relative to src.
 * @param [metadata] - Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional)
 * @param [options] - Options passed to `createPackageWithOptions`.
 */
export declare function createPackageFromFiles(src: string, dest: string, filenames: string[], metadata?: disk.InputMetadata, options?: CreateOptions): Promise<NodeJS.WritableStream>;
export type AsarStream = {
    /**
     * Relative path to the file or directory from within the archive
     */
    path: string;
    /**
     * Function that returns a read stream for a file.
     * Note: this is called multiple times per "file", so a new NodeJS.ReadableStream needs to be created each time
     */
    streamGenerator: () => NodeJS.ReadableStream;
    /**
     * Whether the file/link should be unpacked
     */
    unpacked: boolean;
    stat: CrawledFileType['stat'];
};
export type AsarDirectory = Pick<AsarStream, 'path' | 'unpacked'> & {
    type: 'directory';
};
export type AsarSymlinkStream = AsarStream & {
    type: 'link';
    symlink: string;
};
export type AsarFileStream = AsarStream & {
    type: 'file';
};
export type AsarStreamType = AsarDirectory | AsarFileStream | AsarSymlinkStream;
/**
 * Create an ASAR archive from a list of streams.
 *
 * @param dest - Archive filename (& path).
 * @param streams - List of streams to be piped in-memory into asar filesystem. Insertion order is preserved.
 */
export declare function createPackageFromStreams(dest: string, streams: AsarStreamType[]): Promise<import("fs").WriteStream>;
export declare function statFile(archivePath: string, filename: string, followLinks?: boolean): FilesystemEntry;
export declare function getRawHeader(archivePath: string): disk.ArchiveHeader;
export interface ListOptions {
    isPack: boolean;
}
export declare function listPackage(archivePath: string, options: ListOptions): string[];
export declare function extractFile(archivePath: string, filename: string, followLinks?: boolean): Buffer;
export declare function extractAll(archivePath: string, dest: string): void;
export declare function uncache(archivePath: string): boolean;
export declare function uncacheAll(): void;
export { EntryMetadata } from './filesystem';
export { InputMetadata, DirectoryRecord, FileRecord, ArchiveHeader } from './disk';
export type InputMetadataType = 'directory' | 'file' | 'link';
export type DirectoryMetadata = FilesystemDirectoryEntry;
export type FileMetadata = FilesystemEntry;
export type LinkMetadata = FilesystemLinkEntry;
declare const _default: {
    createPackage: typeof createPackage;
    createPackageWithOptions: typeof createPackageWithOptions;
    createPackageFromFiles: typeof createPackageFromFiles;
    createPackageFromStreams: typeof createPackageFromStreams;
    statFile: typeof statFile;
    getRawHeader: typeof getRawHeader;
    listPackage: typeof listPackage;
    extractFile: typeof extractFile;
    extractAll: typeof extractAll;
    uncache: typeof uncache;
    uncacheAll: typeof uncacheAll;
};
export default _default;
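
The declarations above are the package's entire public API. A minimal usage sketch written against just these signatures; the `app` directory and `app.asar` path are hypothetical placeholders, and `@electron/asar` is assumed to be installed with esModuleInterop enabled:

import asar from '@electron/asar';

async function main(): Promise<void> {
  // Pack a directory, leaving native modules outside the archive
  // (they land in app.asar.unpacked), per CreateOptions.unpack above.
  await asar.createPackageWithOptions('app', 'app.asar', {
    unpack: '*.node',
  });

  // Enumerate the archive with the declared helpers.
  const entries: string[] = asar.listPackage('app.asar', { isPack: false });
  console.log(entries.length, 'entries');

  // Extract everything back out.
  asar.extractAll('app.asar', 'app-extracted');
}

main().catch(console.error);
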
337  desktop-operator/node_modules/@electron/asar/lib/asar.js  generated  vendored  Normal file
@@ -0,0 +1,337 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.createPackage = createPackage;
exports.createPackageWithOptions = createPackageWithOptions;
exports.createPackageFromFiles = createPackageFromFiles;
exports.createPackageFromStreams = createPackageFromStreams;
exports.statFile = statFile;
exports.getRawHeader = getRawHeader;
exports.listPackage = listPackage;
exports.extractFile = extractFile;
exports.extractAll = extractAll;
exports.uncache = uncache;
exports.uncacheAll = uncacheAll;
const path = __importStar(require("path"));
const minimatch_1 = __importDefault(require("minimatch"));
const wrapped_fs_1 = __importDefault(require("./wrapped-fs"));
const filesystem_1 = require("./filesystem");
const disk = __importStar(require("./disk"));
const crawlfs_1 = require("./crawlfs");
/**
 * Whether a directory should be excluded from packing due to the `--unpack-dir` option.
 *
 * @param dirPath - directory path to check
 * @param pattern - literal prefix [for backward compatibility] or glob pattern
 * @param unpackDirs - Array of directory paths previously marked as unpacked
 */
function isUnpackedDir(dirPath, pattern, unpackDirs) {
    if (dirPath.startsWith(pattern) || (0, minimatch_1.default)(dirPath, pattern)) {
        if (!unpackDirs.includes(dirPath)) {
            unpackDirs.push(dirPath);
        }
        return true;
    }
    else {
        return unpackDirs.some((unpackDir) => dirPath.startsWith(unpackDir) && !path.relative(unpackDir, dirPath).startsWith('..'));
    }
}
async function createPackage(src, dest) {
    return createPackageWithOptions(src, dest, {});
}
async function createPackageWithOptions(src, dest, options) {
    const globOptions = options.globOptions ? options.globOptions : {};
    globOptions.dot = options.dot === undefined ? true : options.dot;
    const pattern = src + (options.pattern ? options.pattern : '/**/*');
    const [filenames, metadata] = await (0, crawlfs_1.crawl)(pattern, globOptions);
    return createPackageFromFiles(src, dest, filenames, metadata, options);
}
/**
 * Create an ASAR archive from a list of filenames.
 *
 * @param src - Base path. All files are relative to this.
 * @param dest - Archive filename (& path).
 * @param filenames - List of filenames relative to src.
 * @param [metadata] - Object with filenames as keys and {type='directory|file|link', stat: fs.stat} as values. (Optional)
 * @param [options] - Options passed to `createPackageWithOptions`.
 */
async function createPackageFromFiles(src, dest, filenames, metadata = {}, options = {}) {
    src = path.normalize(src);
    dest = path.normalize(dest);
    filenames = filenames.map(function (filename) {
        return path.normalize(filename);
    });
    const filesystem = new filesystem_1.Filesystem(src);
    const files = [];
    const links = [];
    const unpackDirs = [];
    let filenamesSorted = [];
    if (options.ordering) {
        const orderingFiles = (await wrapped_fs_1.default.readFile(options.ordering))
            .toString()
            .split('\n')
            .map((line) => {
            if (line.includes(':')) {
                line = line.split(':').pop();
            }
            line = line.trim();
            if (line.startsWith('/')) {
                line = line.slice(1);
            }
            return line;
        });
        const ordering = [];
        for (const file of orderingFiles) {
            const pathComponents = file.split(path.sep);
            let str = src;
            for (const pathComponent of pathComponents) {
                str = path.join(str, pathComponent);
                ordering.push(str);
            }
        }
        let missing = 0;
        const total = filenames.length;
        for (const file of ordering) {
            if (!filenamesSorted.includes(file) && filenames.includes(file)) {
                filenamesSorted.push(file);
            }
        }
        for (const file of filenames) {
            if (!filenamesSorted.includes(file)) {
                filenamesSorted.push(file);
                missing += 1;
            }
        }
        console.log(`Ordering file has ${((total - missing) / total) * 100}% coverage.`);
    }
    else {
        filenamesSorted = filenames;
    }
    const handleFile = async function (filename) {
        if (!metadata[filename]) {
            const fileType = await (0, crawlfs_1.determineFileType)(filename);
            if (!fileType) {
                throw new Error('Unknown file type for file: ' + filename);
            }
            metadata[filename] = fileType;
        }
        const file = metadata[filename];
        const shouldUnpackPath = function (relativePath, unpack, unpackDir) {
            let shouldUnpack = false;
            if (unpack) {
                shouldUnpack = (0, minimatch_1.default)(filename, unpack, { matchBase: true });
            }
            if (!shouldUnpack && unpackDir) {
                shouldUnpack = isUnpackedDir(relativePath, unpackDir, unpackDirs);
            }
            return shouldUnpack;
        };
        let shouldUnpack;
        switch (file.type) {
            case 'directory':
                shouldUnpack = shouldUnpackPath(path.relative(src, filename), undefined, options.unpackDir);
                filesystem.insertDirectory(filename, shouldUnpack);
                break;
            case 'file':
                shouldUnpack = shouldUnpackPath(path.relative(src, path.dirname(filename)), options.unpack, options.unpackDir);
                files.push({ filename, unpack: shouldUnpack });
                return filesystem.insertFile(filename, () => wrapped_fs_1.default.createReadStream(filename), shouldUnpack, file, options);
            case 'link':
                shouldUnpack = shouldUnpackPath(path.relative(src, filename), options.unpack, options.unpackDir);
                links.push({ filename, unpack: shouldUnpack });
                filesystem.insertLink(filename, shouldUnpack);
                break;
        }
        return Promise.resolve();
    };
    const insertsDone = async function () {
        await wrapped_fs_1.default.mkdirp(path.dirname(dest));
        return disk.writeFilesystem(dest, filesystem, { files, links }, metadata);
    };
    const names = filenamesSorted.slice();
    const next = async function (name) {
        if (!name) {
            return insertsDone();
        }
        await handleFile(name);
        return next(names.shift());
    };
    return next(names.shift());
}
/**
 * Create an ASAR archive from a list of streams.
 *
 * @param dest - Archive filename (& path).
 * @param streams - List of streams to be piped in-memory into asar filesystem. Insertion order is preserved.
 */
async function createPackageFromStreams(dest, streams) {
    // We use an ambiguous root `src` since we're piping directly from a stream and the `filePath` for the stream is already relative to the src/root
    const src = '.';
    const filesystem = new filesystem_1.Filesystem(src);
    const files = [];
    const links = [];
    const handleFile = async function (stream) {
        const { path: destinationPath, type } = stream;
        const filename = path.normalize(destinationPath);
        switch (type) {
            case 'directory':
                filesystem.insertDirectory(filename, stream.unpacked);
                break;
            case 'file':
                files.push({
                    filename,
                    streamGenerator: stream.streamGenerator,
                    link: undefined,
                    mode: stream.stat.mode,
                    unpack: stream.unpacked,
                });
                return filesystem.insertFile(filename, stream.streamGenerator, stream.unpacked, {
                    type: 'file',
                    stat: stream.stat,
                });
            case 'link':
                links.push({
                    filename,
                    streamGenerator: stream.streamGenerator,
                    link: stream.symlink,
                    mode: stream.stat.mode,
                    unpack: stream.unpacked,
                });
                filesystem.insertLink(filename, stream.unpacked, path.dirname(filename), stream.symlink, src);
                break;
        }
        return Promise.resolve();
    };
    const insertsDone = async function () {
        await wrapped_fs_1.default.mkdirp(path.dirname(dest));
        return disk.streamFilesystem(dest, filesystem, { files, links });
    };
    const streamQueue = streams.slice();
    const next = async function (stream) {
        if (!stream) {
            return insertsDone();
        }
        await handleFile(stream);
        return next(streamQueue.shift());
    };
    return next(streamQueue.shift());
}
function statFile(archivePath, filename, followLinks = true) {
    const filesystem = disk.readFilesystemSync(archivePath);
    return filesystem.getFile(filename, followLinks);
}
function getRawHeader(archivePath) {
    return disk.readArchiveHeaderSync(archivePath);
}
function listPackage(archivePath, options) {
    return disk.readFilesystemSync(archivePath).listFiles(options);
}
function extractFile(archivePath, filename, followLinks = true) {
    const filesystem = disk.readFilesystemSync(archivePath);
    const fileInfo = filesystem.getFile(filename, followLinks);
    if ('link' in fileInfo || 'files' in fileInfo) {
        throw new Error('Expected to find file at: ' + filename + ' but found a directory or link');
    }
    return disk.readFileSync(filesystem, filename, fileInfo);
}
function extractAll(archivePath, dest) {
    const filesystem = disk.readFilesystemSync(archivePath);
    const filenames = filesystem.listFiles();
    // under windows just extract links as regular files
    const followLinks = process.platform === 'win32';
    // create destination directory
    wrapped_fs_1.default.mkdirpSync(dest);
    const extractionErrors = [];
    for (const fullPath of filenames) {
        // Remove leading slash
        const filename = fullPath.substr(1);
        const destFilename = path.join(dest, filename);
        const file = filesystem.getFile(filename, followLinks);
        if (path.relative(dest, destFilename).startsWith('..')) {
            throw new Error(`${fullPath}: file "${destFilename}" writes out of the package`);
        }
        if ('files' in file) {
            // it's a directory, create it and continue with the next entry
            wrapped_fs_1.default.mkdirpSync(destFilename);
        }
        else if ('link' in file) {
            // it's a symlink, create a symlink
            const linkSrcPath = path.dirname(path.join(dest, file.link));
            const linkDestPath = path.dirname(destFilename);
            const relativePath = path.relative(linkDestPath, linkSrcPath);
            // try to delete output file, because we can't overwrite a link
            try {
                wrapped_fs_1.default.unlinkSync(destFilename);
            }
            catch (_a) { }
            const linkTo = path.join(relativePath, path.basename(file.link));
            if (path.relative(dest, linkSrcPath).startsWith('..')) {
                throw new Error(`${fullPath}: file "${file.link}" links out of the package to "${linkSrcPath}"`);
            }
            wrapped_fs_1.default.symlinkSync(linkTo, destFilename);
        }
        else {
            // it's a file, try to extract it
            try {
                const content = disk.readFileSync(filesystem, filename, file);
                wrapped_fs_1.default.writeFileSync(destFilename, content);
                if (file.executable) {
                    wrapped_fs_1.default.chmodSync(destFilename, '755');
                }
            }
            catch (e) {
                extractionErrors.push(e);
            }
        }
    }
    if (extractionErrors.length) {
        throw new Error('Unable to extract some files:\n\n' +
            extractionErrors.map((error) => error.stack).join('\n\n'));
    }
}
function uncache(archivePath) {
    return disk.uncacheFilesystem(archivePath);
}
function uncacheAll() {
    disk.uncacheAll();
}
// Export everything in default, too
exports.default = {
    createPackage,
    createPackageWithOptions,
    createPackageFromFiles,
    createPackageFromStreams,
    statFile,
    getRawHeader,
    listPackage,
    extractFile,
    extractAll,
    uncache,
    uncacheAll,
};
//# sourceMappingURL=asar.js.map
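
`createPackageFromFiles` gives entries listed in the `ordering` file priority over the natural crawl order, appends everything else, and logs the coverage percentage. A sketch of how that option could be exercised; the file names are hypothetical:

import { promises as fs } from 'fs';
import asar from '@electron/asar';

async function packWithOrdering(): Promise<void> {
  // One entry per line, relative to `src`. Per the parser above,
  // anything up to the last ':' is dropped and a leading '/' is
  // stripped, so each of the three documented formats reduces to a
  // plain relative path.
  const ordering = [
    'main.js',
    '/renderer.js',
    '12345:node_modules/left-pad/index.js',
  ].join('\n');
  await fs.writeFile('ordering.txt', ordering);

  await asar.createPackageWithOptions('app', 'app.asar', {
    ordering: 'ordering.txt',
  });
}

packWithOrdering().catch(console.error);
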
1  desktop-operator/node_modules/@electron/asar/lib/asar.js.map  generated  vendored  Normal file
File diff suppressed because one or more lines are too long
12  desktop-operator/node_modules/@electron/asar/lib/crawlfs.d.ts  generated  vendored  Normal file
@@ -0,0 +1,12 @@
import { Stats } from 'fs';
import { IOptions } from './types/glob';
export type CrawledFileType = {
    type: 'file' | 'directory' | 'link';
    stat: Pick<Stats, 'mode' | 'size'>;
    transformed?: {
        path: string;
        stat: Stats;
    };
};
export declare function determineFileType(filename: string): Promise<CrawledFileType | null>;
export declare function crawl(dir: string, options: IOptions): Promise<readonly [string[], Record<string, CrawledFileType>]>;
79  desktop-operator/node_modules/@electron/asar/lib/crawlfs.js  generated  vendored  Normal file
@@ -0,0 +1,79 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.determineFileType = determineFileType;
exports.crawl = crawl;
const util_1 = require("util");
const glob_1 = require("glob");
const wrapped_fs_1 = __importDefault(require("./wrapped-fs"));
const path = __importStar(require("path"));
const glob = (0, util_1.promisify)(glob_1.glob);
async function determineFileType(filename) {
    const stat = await wrapped_fs_1.default.lstat(filename);
    if (stat.isFile()) {
        return { type: 'file', stat };
    }
    else if (stat.isDirectory()) {
        return { type: 'directory', stat };
    }
    else if (stat.isSymbolicLink()) {
        return { type: 'link', stat };
    }
    return null;
}
async function crawl(dir, options) {
    const metadata = {};
    const crawled = await glob(dir, options);
    const results = await Promise.all(crawled.map(async (filename) => [filename, await determineFileType(filename)]));
    const links = [];
    const filenames = results
        .map(([filename, type]) => {
        if (type) {
            metadata[filename] = type;
            if (type.type === 'link')
                links.push(filename);
        }
        return filename;
    })
        .filter((filename) => {
        // Newer glob can return files inside symlinked directories, to avoid
        // those appearing in archives we need to manually exclude them here
        const exactLinkIndex = links.findIndex((link) => filename === link);
        return links.every((link, index) => {
            if (index === exactLinkIndex) {
                return true;
            }
            const isFileWithinSymlinkDir = filename.startsWith(link);
            // symlink may point outside the directory: https://github.com/electron/asar/issues/303
            const relativePath = path.relative(link, path.dirname(filename));
            return !isFileWithinSymlinkDir || relativePath.startsWith('..');
        });
    });
    return [filenames, metadata];
}
//# sourceMappingURL=crawlfs.js.map
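
The filter at the end of `crawl` drops entries that live inside a symlinked directory while keeping the link entry itself, using a prefix match refined with `path.relative`. A standalone sketch of that containment test; the paths are hypothetical:

import * as path from 'path';

// True when `filename` sits underneath the symlinked directory `link`,
// mirroring the check in crawl(): a raw startsWith prefix match alone
// would wrongly treat "app/linked-other/x" as inside "app/linked".
function isInsideSymlinkedDir(filename: string, link: string): boolean {
  const isFileWithinSymlinkDir = filename.startsWith(link);
  const relativePath = path.relative(link, path.dirname(filename));
  return isFileWithinSymlinkDir && !relativePath.startsWith('..');
}

console.log(isInsideSymlinkedDir('app/linked/inner.txt', 'app/linked')); // true
console.log(isInsideSymlinkedDir('app/linked-other/x.txt', 'app/linked')); // false
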
1  desktop-operator/node_modules/@electron/asar/lib/crawlfs.js.map  generated  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"crawlfs.js","sourceRoot":"","sources":["../src/crawlfs.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAmBA,8CAUC;AAED,sBA8BC;AA7DD,+BAAiC;AACjC,+BAAqC;AAErC,8DAA8B;AAE9B,2CAA6B;AAG7B,MAAM,IAAI,GAAG,IAAA,gBAAS,EAAC,WAAK,CAAC,CAAC;AAWvB,KAAK,UAAU,iBAAiB,CAAC,QAAgB;IACtD,MAAM,IAAI,GAAG,MAAM,oBAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;IACtC,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE,CAAC;QAClB,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;IAChC,CAAC;SAAM,IAAI,IAAI,CAAC,WAAW,EAAE,EAAE,CAAC;QAC9B,OAAO,EAAE,IAAI,EAAE,WAAW,EAAE,IAAI,EAAE,CAAC;IACrC,CAAC;SAAM,IAAI,IAAI,CAAC,cAAc,EAAE,EAAE,CAAC;QACjC,OAAO,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,CAAC;IAChC,CAAC;IACD,OAAO,IAAI,CAAC;AACd,CAAC;AAEM,KAAK,UAAU,KAAK,CAAC,GAAW,EAAE,OAAiB;IACxD,MAAM,QAAQ,GAAoC,EAAE,CAAC;IACrD,MAAM,OAAO,GAAG,MAAM,IAAI,CAAC,GAAG,EAAE,OAAO,CAAC,CAAC;IACzC,MAAM,OAAO,GAAG,MAAM,OAAO,CAAC,GAAG,CAC/B,OAAO,CAAC,GAAG,CAAC,KAAK,EAAE,QAAQ,EAAE,EAAE,CAAC,CAAC,QAAQ,EAAE,MAAM,iBAAiB,CAAC,QAAQ,CAAC,CAAU,CAAC,CACxF,CAAC;IACF,MAAM,KAAK,GAAa,EAAE,CAAC;IAC3B,MAAM,SAAS,GAAG,OAAO;SACtB,GAAG,CAAC,CAAC,CAAC,QAAQ,EAAE,IAAI,CAAC,EAAE,EAAE;QACxB,IAAI,IAAI,EAAE,CAAC;YACT,QAAQ,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC;YAC1B,IAAI,IAAI,CAAC,IAAI,KAAK,MAAM;gBAAE,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;QACjD,CAAC;QACD,OAAO,QAAQ,CAAC;IAClB,CAAC,CAAC;SACD,MAAM,CAAC,CAAC,QAAQ,EAAE,EAAE;QACnB,qEAAqE;QACrE,qEAAqE;QACrE,MAAM,cAAc,GAAG,KAAK,CAAC,SAAS,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,QAAQ,KAAK,IAAI,CAAC,CAAC;QACpE,OAAO,KAAK,CAAC,KAAK,CAAC,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;YACjC,IAAI,KAAK,KAAK,cAAc,EAAE,CAAC;gBAC7B,OAAO,IAAI,CAAC;YACd,CAAC;YACD,MAAM,sBAAsB,GAAG,QAAQ,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;YACzD,uFAAuF;YACvF,MAAM,YAAY,GAAG,IAAI,CAAC,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC;YACjE,OAAO,CAAC,sBAAsB,IAAI,YAAY,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QAClE,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAC;IACL,OAAO,CAAC,SAAS,EAAE,QAAQ,CAAU,CAAC;AACxC,CAAC"}
44  desktop-operator/node_modules/@electron/asar/lib/disk.d.ts  generated  vendored  Normal file
@@ -0,0 +1,44 @@
import { Filesystem, FilesystemFileEntry } from './filesystem';
import { CrawledFileType } from './crawlfs';
import { Stats } from 'fs';
export type InputMetadata = {
    [property: string]: CrawledFileType;
};
export type BasicFilesArray = {
    filename: string;
    unpack: boolean;
}[];
export type BasicStreamArray = {
    filename: string;
    streamGenerator: () => NodeJS.ReadableStream;
    mode: Stats['mode'];
    unpack: boolean;
    link: string | undefined;
}[];
export type FilesystemFilesAndLinks<T extends BasicFilesArray | BasicStreamArray> = {
    files: T;
    links: T;
};
export declare function writeFilesystem(dest: string, filesystem: Filesystem, lists: FilesystemFilesAndLinks<BasicFilesArray>, metadata: InputMetadata): Promise<NodeJS.WritableStream>;
export declare function streamFilesystem(dest: string, filesystem: Filesystem, lists: FilesystemFilesAndLinks<BasicStreamArray>): Promise<import("fs").WriteStream>;
export interface FileRecord extends FilesystemFileEntry {
    integrity: {
        hash: string;
        algorithm: 'SHA256';
        blocks: string[];
        blockSize: number;
    };
}
export type DirectoryRecord = {
    files: Record<string, DirectoryRecord | FileRecord>;
};
export type ArchiveHeader = {
    header: DirectoryRecord;
    headerString: string;
    headerSize: number;
};
export declare function readArchiveHeaderSync(archivePath: string): ArchiveHeader;
export declare function readFilesystemSync(archivePath: string): Filesystem;
export declare function uncacheFilesystem(archivePath: string): boolean;
export declare function uncacheAll(): void;
export declare function readFileSync(filesystem: Filesystem, filename: string, info: FilesystemFileEntry): Buffer;
219  desktop-operator/node_modules/@electron/asar/lib/disk.js  generated  vendored  Normal file
@@ -0,0 +1,219 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __asyncValues = (this && this.__asyncValues) || function (o) {
    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
    var m = o[Symbol.asyncIterator], i;
    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.writeFilesystem = writeFilesystem;
exports.streamFilesystem = streamFilesystem;
exports.readArchiveHeaderSync = readArchiveHeaderSync;
exports.readFilesystemSync = readFilesystemSync;
exports.uncacheFilesystem = uncacheFilesystem;
exports.uncacheAll = uncacheAll;
exports.readFileSync = readFileSync;
const path = __importStar(require("path"));
const wrapped_fs_1 = __importDefault(require("./wrapped-fs"));
const pickle_1 = require("./pickle");
const filesystem_1 = require("./filesystem");
const util_1 = require("util");
const stream = __importStar(require("stream"));
const pipeline = (0, util_1.promisify)(stream.pipeline);
let filesystemCache = Object.create(null);
async function copyFile(dest, src, filename) {
    const srcFile = path.join(src, filename);
    const targetFile = path.join(dest, filename);
    const [content, stats] = await Promise.all([
        wrapped_fs_1.default.readFile(srcFile),
        wrapped_fs_1.default.stat(srcFile),
        wrapped_fs_1.default.mkdirp(path.dirname(targetFile)),
    ]);
    return wrapped_fs_1.default.writeFile(targetFile, content, { mode: stats.mode });
}
async function streamTransformedFile(stream, outStream) {
    return new Promise((resolve, reject) => {
        stream.pipe(outStream, { end: false });
        stream.on('error', reject);
        stream.on('end', () => resolve());
    });
}
const writeFileListToStream = async function (dest, filesystem, out, lists, metadata) {
    const { files, links } = lists;
    for (const file of files) {
        if (file.unpack) {
            // the file should not be packed into the archive
            const filename = path.relative(filesystem.getRootPath(), file.filename);
            await copyFile(`${dest}.unpacked`, filesystem.getRootPath(), filename);
        }
        else {
            const transformed = metadata[file.filename].transformed;
            const stream = wrapped_fs_1.default.createReadStream(transformed ? transformed.path : file.filename);
            await streamTransformedFile(stream, out);
        }
    }
    for (const file of links.filter((f) => f.unpack)) {
        // the symlink needs to be recreated outside in .unpacked
        const filename = path.relative(filesystem.getRootPath(), file.filename);
        const link = await wrapped_fs_1.default.readlink(file.filename);
        await createSymlink(dest, filename, link);
    }
    return out.end();
};
async function writeFilesystem(dest, filesystem, lists, metadata) {
    const out = await createFilesystemWriteStream(filesystem, dest);
    return writeFileListToStream(dest, filesystem, out, lists, metadata);
}
async function streamFilesystem(dest, filesystem, lists) {
    var _a, e_1, _b, _c;
    const out = await createFilesystemWriteStream(filesystem, dest);
    const { files, links } = lists;
    try {
        for (var _d = true, files_1 = __asyncValues(files), files_1_1; files_1_1 = await files_1.next(), _a = files_1_1.done, !_a; _d = true) {
            _c = files_1_1.value;
            _d = false;
            const file = _c;
            // the file should not be packed into the archive
            if (file.unpack) {
                const targetFile = path.join(`${dest}.unpacked`, file.filename);
                await wrapped_fs_1.default.mkdirp(path.dirname(targetFile));
                const writeStream = wrapped_fs_1.default.createWriteStream(targetFile, { mode: file.mode });
                await pipeline(file.streamGenerator(), writeStream);
            }
            else {
                await streamTransformedFile(file.streamGenerator(), out);
            }
        }
    }
    catch (e_1_1) { e_1 = { error: e_1_1 }; }
    finally {
        try {
            if (!_d && !_a && (_b = files_1.return)) await _b.call(files_1);
        }
        finally { if (e_1) throw e_1.error; }
    }
    for (const file of links.filter((f) => f.unpack && f.link)) {
        // the symlink needs to be recreated outside in .unpacked
        await createSymlink(dest, file.filename, file.link);
    }
    return out.end();
}
function readArchiveHeaderSync(archivePath) {
    const fd = wrapped_fs_1.default.openSync(archivePath, 'r');
    let size;
    let headerBuf;
    try {
        const sizeBuf = Buffer.alloc(8);
        if (wrapped_fs_1.default.readSync(fd, sizeBuf, 0, 8, null) !== 8) {
            throw new Error('Unable to read header size');
        }
        const sizePickle = pickle_1.Pickle.createFromBuffer(sizeBuf);
        size = sizePickle.createIterator().readUInt32();
        headerBuf = Buffer.alloc(size);
        if (wrapped_fs_1.default.readSync(fd, headerBuf, 0, size, null) !== size) {
            throw new Error('Unable to read header');
        }
    }
    finally {
        wrapped_fs_1.default.closeSync(fd);
    }
    const headerPickle = pickle_1.Pickle.createFromBuffer(headerBuf);
    const header = headerPickle.createIterator().readString();
    return { headerString: header, header: JSON.parse(header), headerSize: size };
}
function readFilesystemSync(archivePath) {
    if (!filesystemCache[archivePath]) {
        const header = readArchiveHeaderSync(archivePath);
        const filesystem = new filesystem_1.Filesystem(archivePath);
        filesystem.setHeader(header.header, header.headerSize);
        filesystemCache[archivePath] = filesystem;
    }
    return filesystemCache[archivePath];
}
function uncacheFilesystem(archivePath) {
    if (filesystemCache[archivePath]) {
        filesystemCache[archivePath] = undefined;
        return true;
    }
    return false;
}
function uncacheAll() {
    filesystemCache = {};
}
function readFileSync(filesystem, filename, info) {
    let buffer = Buffer.alloc(info.size);
    if (info.size <= 0) {
        return buffer;
    }
    if (info.unpacked) {
        // it's an unpacked file, copy it.
        buffer = wrapped_fs_1.default.readFileSync(path.join(`${filesystem.getRootPath()}.unpacked`, filename));
    }
    else {
        // Node throws an exception when reading 0 bytes into a 0-size buffer,
        // so we short-circuit the read in this case.
        const fd = wrapped_fs_1.default.openSync(filesystem.getRootPath(), 'r');
        try {
            const offset = 8 + filesystem.getHeaderSize() + parseInt(info.offset);
            wrapped_fs_1.default.readSync(fd, buffer, 0, info.size, offset);
        }
        finally {
            wrapped_fs_1.default.closeSync(fd);
        }
    }
    return buffer;
}
async function createFilesystemWriteStream(filesystem, dest) {
    const headerPickle = pickle_1.Pickle.createEmpty();
    headerPickle.writeString(JSON.stringify(filesystem.getHeader()));
    const headerBuf = headerPickle.toBuffer();
    const sizePickle = pickle_1.Pickle.createEmpty();
    sizePickle.writeUInt32(headerBuf.length);
    const sizeBuf = sizePickle.toBuffer();
    const out = wrapped_fs_1.default.createWriteStream(dest);
    await new Promise((resolve, reject) => {
        out.on('error', reject);
        out.write(sizeBuf);
        return out.write(headerBuf, () => resolve());
    });
    return out;
}
async function createSymlink(dest, filepath, link) {
    // if symlink is within subdirectories, then we need to recreate dir structure
    await wrapped_fs_1.default.mkdirp(path.join(`${dest}.unpacked`, path.dirname(filepath)));
    // create symlink within unpacked dir
    await wrapped_fs_1.default.symlink(link, path.join(`${dest}.unpacked`, filepath)).catch(async (error) => {
        if (error.code === 'EPERM' && error.syscall === 'symlink') {
            throw new Error('Could not create symlinks for unpacked assets. On Windows, consider activating Developer Mode to allow non-admin users to create symlinks by following the instructions at https://docs.microsoft.com/en-us/windows/apps/get-started/enable-your-device-for-development.');
        }
        throw error;
    });
}
//# sourceMappingURL=disk.js.map
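
`readArchiveHeaderSync` and `readFileSync` together pin down the on-disk layout: an 8-byte size pickle, a pickled JSON header of `headerSize` bytes, then the concatenated file contents addressed at `8 + headerSize + offset`. A minimal standalone header reader built directly on that layout with plain `fs`; this is a sketch assuming a well-formed archive, and `app.asar` style paths are placeholders:

import * as fs from 'fs';

// Read the JSON header of an asar archive using only the layout above:
// bytes [0,8) are a pickle holding one UInt32 (4-byte payload size,
// then the header block length), followed by the pickled header string.
function readAsarHeader(archivePath: string): { header: unknown; headerSize: number } {
  const fd = fs.openSync(archivePath, 'r');
  try {
    const sizeBuf = Buffer.alloc(8);
    fs.readSync(fd, sizeBuf, 0, 8, 0);
    // Second UInt32 of the size pickle is the header block's length.
    const headerSize = sizeBuf.readUInt32LE(4);
    const headerBuf = Buffer.alloc(headerSize);
    fs.readSync(fd, headerBuf, 0, headerSize, 8);
    // Inside the header pickle: payload size, string length, then JSON.
    const stringLength = headerBuf.readUInt32LE(4);
    const json = headerBuf.subarray(8, 8 + stringLength).toString('utf8');
    return { header: JSON.parse(json), headerSize };
  } finally {
    fs.closeSync(fd);
  }
}
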
1  desktop-operator/node_modules/@electron/asar/lib/disk.js.map  generated  vendored  Normal file
File diff suppressed because one or more lines are too long
44  desktop-operator/node_modules/@electron/asar/lib/filesystem.d.ts  generated  vendored  Normal file
@@ -0,0 +1,44 @@
import { FileIntegrity } from './integrity';
import { CrawledFileType } from './crawlfs';
export type EntryMetadata = {
    unpacked?: boolean;
};
export type FilesystemDirectoryEntry = {
    files: Record<string, FilesystemEntry>;
} & EntryMetadata;
export type FilesystemFileEntry = {
    unpacked: boolean;
    executable: boolean;
    offset: string;
    size: number;
    integrity: FileIntegrity;
} & EntryMetadata;
export type FilesystemLinkEntry = {
    link: string;
} & EntryMetadata;
export type FilesystemEntry = FilesystemDirectoryEntry | FilesystemFileEntry | FilesystemLinkEntry;
export declare class Filesystem {
    private src;
    private header;
    private headerSize;
    private offset;
    constructor(src: string);
    getRootPath(): string;
    getHeader(): FilesystemEntry;
    getHeaderSize(): number;
    setHeader(header: FilesystemEntry, headerSize: number): void;
    searchNodeFromDirectory(p: string): FilesystemEntry;
    searchNodeFromPath(p: string): FilesystemEntry;
    insertDirectory(p: string, shouldUnpack: boolean): Record<string, FilesystemEntry>;
    insertFile(p: string, streamGenerator: () => NodeJS.ReadableStream, shouldUnpack: boolean, file: CrawledFileType, options?: {
        transform?: (filePath: string) => NodeJS.ReadWriteStream | void;
    }): Promise<void>;
    insertLink(p: string, shouldUnpack: boolean, parentPath?: string, symlink?: string, // /var/tmp => /private/var
    src?: string): string;
    private resolveLink;
    listFiles(options?: {
        isPack: boolean;
    }): string[];
    getNode(p: string, followLinks?: boolean): FilesystemEntry;
    getFile(p: string, followLinks?: boolean): FilesystemEntry;
}
199  desktop-operator/node_modules/@electron/asar/lib/filesystem.js  generated  vendored  Normal file
@@ -0,0 +1,199 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Filesystem = void 0;
const os = __importStar(require("os"));
const path = __importStar(require("path"));
const util_1 = require("util");
const stream = __importStar(require("stream"));
const integrity_1 = require("./integrity");
const wrapped_fs_1 = __importDefault(require("./wrapped-fs"));
const UINT32_MAX = 2 ** 32 - 1;
const pipeline = (0, util_1.promisify)(stream.pipeline);
class Filesystem {
    constructor(src) {
        this.src = path.resolve(src);
        this.header = { files: Object.create(null) };
        this.headerSize = 0;
        this.offset = BigInt(0);
    }
    getRootPath() {
        return this.src;
    }
    getHeader() {
        return this.header;
    }
    getHeaderSize() {
        return this.headerSize;
    }
    setHeader(header, headerSize) {
        this.header = header;
        this.headerSize = headerSize;
    }
    searchNodeFromDirectory(p) {
        let json = this.header;
        const dirs = p.split(path.sep);
        for (const dir of dirs) {
            if (dir !== '.') {
                if ('files' in json) {
                    if (!json.files[dir]) {
                        json.files[dir] = { files: Object.create(null) };
                    }
                    json = json.files[dir];
                }
                else {
                    throw new Error('Unexpected directory state while traversing: ' + p);
                }
            }
        }
        return json;
    }
    searchNodeFromPath(p) {
        p = path.relative(this.src, p);
        if (!p) {
            return this.header;
        }
        const name = path.basename(p);
        const node = this.searchNodeFromDirectory(path.dirname(p));
        if (!node.files) {
            node.files = Object.create(null);
        }
        if (!node.files[name]) {
            node.files[name] = Object.create(null);
        }
        return node.files[name];
    }
    insertDirectory(p, shouldUnpack) {
        const node = this.searchNodeFromPath(p);
        if (shouldUnpack) {
            node.unpacked = shouldUnpack;
        }
        node.files = node.files || Object.create(null);
        return node.files;
    }
    async insertFile(p, streamGenerator, shouldUnpack, file, options = {}) {
        const dirNode = this.searchNodeFromPath(path.dirname(p));
        const node = this.searchNodeFromPath(p);
        if (shouldUnpack || dirNode.unpacked) {
            node.size = file.stat.size;
            node.unpacked = true;
            node.integrity = await (0, integrity_1.getFileIntegrity)(streamGenerator());
            return Promise.resolve();
        }
        let size;
        const transformed = options.transform && options.transform(p);
        if (transformed) {
            const tmpdir = await wrapped_fs_1.default.mkdtemp(path.join(os.tmpdir(), 'asar-'));
            const tmpfile = path.join(tmpdir, path.basename(p));
            const out = wrapped_fs_1.default.createWriteStream(tmpfile);
            await pipeline(streamGenerator(), transformed, out);
            file.transformed = {
                path: tmpfile,
                stat: await wrapped_fs_1.default.lstat(tmpfile),
            };
            size = file.transformed.stat.size;
        }
        else {
            size = file.stat.size;
        }
        // JavaScript cannot precisely represent integers >= UINT32_MAX.
        if (size > UINT32_MAX) {
            throw new Error(`${p}: file size can not be larger than 4.2GB`);
        }
        node.size = size;
        node.offset = this.offset.toString();
        node.integrity = await (0, integrity_1.getFileIntegrity)(streamGenerator());
        if (process.platform !== 'win32' && file.stat.mode & 0o100) {
            node.executable = true;
        }
        this.offset += BigInt(size);
    }
    insertLink(p, shouldUnpack, parentPath = wrapped_fs_1.default.realpathSync(path.dirname(p)), symlink = wrapped_fs_1.default.readlinkSync(p), // /var/tmp => /private/var
    src = wrapped_fs_1.default.realpathSync(this.src)) {
        const link = this.resolveLink(src, parentPath, symlink);
        if (link.startsWith('..')) {
            throw new Error(`${p}: file "${link}" links out of the package`);
        }
        const node = this.searchNodeFromPath(p);
        const dirNode = this.searchNodeFromPath(path.dirname(p));
        if (shouldUnpack || dirNode.unpacked) {
            node.unpacked = true;
        }
        node.link = link;
        return link;
    }
    resolveLink(src, parentPath, symlink) {
        const target = path.join(parentPath, symlink);
        const link = path.relative(src, target);
        return link;
    }
    listFiles(options) {
        const files = [];
        const fillFilesFromMetadata = function (basePath, metadata) {
            if (!('files' in metadata)) {
                return;
            }
            for (const [childPath, childMetadata] of Object.entries(metadata.files)) {
                const fullPath = path.join(basePath, childPath);
                const packState = 'unpacked' in childMetadata && childMetadata.unpacked ? 'unpack' : 'pack ';
                files.push(options && options.isPack ? `${packState} : ${fullPath}` : fullPath);
                fillFilesFromMetadata(fullPath, childMetadata);
            }
        };
        fillFilesFromMetadata('/', this.header);
        return files;
    }
    getNode(p, followLinks = true) {
        const node = this.searchNodeFromDirectory(path.dirname(p));
        const name = path.basename(p);
        if ('link' in node && followLinks) {
            return this.getNode(path.join(node.link, name));
        }
        if (name) {
            return node.files[name];
        }
        else {
            return node;
        }
    }
    getFile(p, followLinks = true) {
        const info = this.getNode(p, followLinks);
        if (!info) {
            throw new Error(`"${p}" was not found in this archive`);
        }
        // if followLinks is false we don't resolve symlinks
        if ('link' in info && followLinks) {
            return this.getFile(info.link, followLinks);
        }
        else {
            return info;
        }
    }
}
exports.Filesystem = Filesystem;
//# sourceMappingURL=filesystem.js.map
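
`Filesystem` accumulates the archive index as a nested `{ files: {...} }` tree: `searchNodeFromPath` lazily creates intermediate directory nodes, and `insertFile` stamps each leaf with `size`, a stringified byte `offset`, and an `integrity` record. A hand-written sketch of the header shape for a small two-file package (not generated output; hashes are placeholders):

// Offsets count bytes into the data section, in insertion order.
const exampleHeader = {
  files: {
    'index.js': {
      size: 1024,
      offset: '0',
      integrity: {
        algorithm: 'SHA256',
        hash: '<sha256 of the whole file>',
        blockSize: 4 * 1024 * 1024,
        blocks: ['<sha256 of block 0>'],
      },
    },
    lib: {
      files: {
        'util.js': {
          size: 2048,
          offset: '1024', // previous offset + previous size
          integrity: {
            algorithm: 'SHA256',
            hash: '<sha256>',
            blockSize: 4 * 1024 * 1024,
            blocks: ['<sha256>'],
          },
        },
      },
    },
  },
};
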
1  desktop-operator/node_modules/@electron/asar/lib/filesystem.js.map  generated  vendored  Normal file
File diff suppressed because one or more lines are too long
7  desktop-operator/node_modules/@electron/asar/lib/integrity.d.ts  generated  vendored  Normal file
@@ -0,0 +1,7 @@
export type FileIntegrity = {
    algorithm: 'SHA256';
    hash: string;
    blockSize: number;
    blocks: string[];
};
export declare function getFileIntegrity(inputFileStream: NodeJS.ReadableStream): Promise<FileIntegrity>;
75  desktop-operator/node_modules/@electron/asar/lib/integrity.js  generated  vendored  Normal file
@@ -0,0 +1,75 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
      desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
    if (mod && mod.__esModule) return mod;
    var result = {};
    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
    __setModuleDefault(result, mod);
    return result;
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getFileIntegrity = getFileIntegrity;
const crypto = __importStar(require("crypto"));
const stream = __importStar(require("stream"));
const util_1 = require("util");
const ALGORITHM = 'SHA256';
// 4MB default block size
const BLOCK_SIZE = 4 * 1024 * 1024;
const pipeline = (0, util_1.promisify)(stream.pipeline);
function hashBlock(block) {
    return crypto.createHash(ALGORITHM).update(block).digest('hex');
}
async function getFileIntegrity(inputFileStream) {
    const fileHash = crypto.createHash(ALGORITHM);
    const blockHashes = [];
    let currentBlockSize = 0;
    let currentBlock = [];
    await pipeline(inputFileStream, new stream.PassThrough({
        decodeStrings: false,
        transform(_chunk, encoding, callback) {
            fileHash.update(_chunk);
            function handleChunk(chunk) {
                const diffToSlice = Math.min(BLOCK_SIZE - currentBlockSize, chunk.byteLength);
                currentBlockSize += diffToSlice;
                currentBlock.push(chunk.slice(0, diffToSlice));
                if (currentBlockSize === BLOCK_SIZE) {
                    blockHashes.push(hashBlock(Buffer.concat(currentBlock)));
                    currentBlock = [];
                    currentBlockSize = 0;
                }
                if (diffToSlice < chunk.byteLength) {
                    handleChunk(chunk.slice(diffToSlice));
                }
            }
            handleChunk(_chunk);
            callback();
        },
        flush(callback) {
            blockHashes.push(hashBlock(Buffer.concat(currentBlock)));
            currentBlock = [];
            callback();
        },
    }));
    return {
        algorithm: ALGORITHM,
        hash: fileHash.digest('hex'),
        blockSize: BLOCK_SIZE,
        blocks: blockHashes,
    };
}
//# sourceMappingURL=integrity.js.map
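
`getFileIntegrity` hashes the stream twice over: once whole-file and once in 4 MiB blocks. Verification is the mirror image; a sketch that recomputes a `FileIntegrity` record for an on-disk file and compares it, assuming it is placed alongside this lib directory (the relative import and `filePath` argument are illustrative):

import * as crypto from 'crypto';
import * as fs from 'fs';
import type { FileIntegrity } from './integrity';

function hashOf(algorithm: string, block: Buffer): string {
  return crypto.createHash(algorithm).update(block).digest('hex');
}

// Recompute the whole-file hash and per-block hashes the same way
// integrity.js does, then compare against the expected record.
function verifyFileIntegrity(filePath: string, expected: FileIntegrity): boolean {
  const data = fs.readFileSync(filePath);
  if (hashOf(expected.algorithm, data) !== expected.hash) {
    return false;
  }
  const blocks: string[] = [];
  for (let i = 0; i < data.length; i += expected.blockSize) {
    blocks.push(hashOf(expected.algorithm, data.subarray(i, i + expected.blockSize)));
  }
  // flush() above always emits one trailing hash for the (possibly
  // empty) remainder, so mirror that on exact multiples of blockSize.
  if (data.length % expected.blockSize === 0) {
    blocks.push(hashOf(expected.algorithm, Buffer.alloc(0)));
  }
  return blocks.length === expected.blocks.length &&
    blocks.every((block, i) => block === expected.blocks[i]);
}
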
1  desktop-operator/node_modules/@electron/asar/lib/integrity.js.map  generated  vendored  Normal file
@@ -0,0 +1 @@
{"version":3,"file":"integrity.js","sourceRoot":"","sources":["../src/integrity.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAsBA,4CA8CC;AApED,+CAAiC;AAEjC,+CAAiC;AACjC,+BAAiC;AAEjC,MAAM,SAAS,GAAG,QAAQ,CAAC;AAC3B,yBAAyB;AACzB,MAAM,UAAU,GAAG,CAAC,GAAG,IAAI,GAAG,IAAI,CAAC;AAEnC,MAAM,QAAQ,GAAG,IAAA,gBAAS,EAAC,MAAM,CAAC,QAAQ,CAAC,CAAC;AAE5C,SAAS,SAAS,CAAC,KAAa;IAC9B,OAAO,MAAM,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;AAClE,CAAC;AASM,KAAK,UAAU,gBAAgB,CACpC,eAAsC;IAEtC,MAAM,QAAQ,GAAG,MAAM,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC;IAE9C,MAAM,WAAW,GAAa,EAAE,CAAC;IACjC,IAAI,gBAAgB,GAAG,CAAC,CAAC;IACzB,IAAI,YAAY,GAAa,EAAE,CAAC;IAEhC,MAAM,QAAQ,CACZ,eAAe,EACf,IAAI,MAAM,CAAC,WAAW,CAAC;QACrB,aAAa,EAAE,KAAK;QACpB,SAAS,CAAC,MAAc,EAAE,QAAQ,EAAE,QAAQ;YAC1C,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YAExB,SAAS,WAAW,CAAC,KAAa;gBAChC,MAAM,WAAW,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,GAAG,gBAAgB,EAAE,KAAK,CAAC,UAAU,CAAC,CAAC;gBAC9E,gBAAgB,IAAI,WAAW,CAAC;gBAChC,YAAY,CAAC,IAAI,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,EAAE,WAAW,CAAC,CAAC,CAAC;gBAC/C,IAAI,gBAAgB,KAAK,UAAU,EAAE,CAAC;oBACpC,WAAW,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;oBACzD,YAAY,GAAG,EAAE,CAAC;oBAClB,gBAAgB,GAAG,CAAC,CAAC;gBACvB,CAAC;gBACD,IAAI,WAAW,GAAG,KAAK,CAAC,UAAU,EAAE,CAAC;oBACnC,WAAW,CAAC,KAAK,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC;gBACxC,CAAC;YACH,CAAC;YACD,WAAW,CAAC,MAAM,CAAC,CAAC;YACpB,QAAQ,EAAE,CAAC;QACb,CAAC;QACD,KAAK,CAAC,QAAQ;YACZ,WAAW,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,MAAM,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC;YACzD,YAAY,GAAG,EAAE,CAAC;YAClB,QAAQ,EAAE,CAAC;QACb,CAAC;KACF,CAAC,CACH,CAAC;IAEF,OAAO;QACL,SAAS,EAAE,SAAS;QACpB,IAAI,EAAE,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC;QAC5B,SAAS,EAAE,UAAU;QACrB,MAAM,EAAE,WAAW;KACpB,CAAC;AACJ,CAAC"}
46  desktop-operator/node_modules/@electron/asar/lib/pickle.d.ts  generated  vendored  Normal file
@@ -0,0 +1,46 @@
declare class PickleIterator {
    private payload;
    private payloadOffset;
    private readIndex;
    private endIndex;
    constructor(pickle: Pickle);
    readBool(): boolean;
    readInt(): number;
    readUInt32(): number;
    readInt64(): bigint;
    readUInt64(): bigint;
    readFloat(): number;
    readDouble(): number;
    readString(): string;
    readBytes(length: number): Buffer;
    readBytes<R, F extends (...args: any[]) => R>(length: number, method: F): R;
    getReadPayloadOffsetAndAdvance(length: number): number;
    advance(size: number): void;
}
export declare class Pickle {
    private header;
    private headerSize;
    private capacityAfterHeader;
    private writeOffset;
    private constructor();
    static createEmpty(): Pickle;
    static createFromBuffer(buffer: Buffer): Pickle;
    getHeader(): Buffer;
    getHeaderSize(): number;
    createIterator(): PickleIterator;
    toBuffer(): Buffer;
    writeBool(value: boolean): boolean;
    writeInt(value: number): boolean;
    writeUInt32(value: number): boolean;
    writeInt64(value: number): boolean;
    writeUInt64(value: number): boolean;
    writeFloat(value: number): boolean;
    writeDouble(value: number): boolean;
    writeString(value: string): boolean;
    setPayloadSize(payloadSize: number): number;
    getPayloadSize(): number;
    writeBytes(data: string, length: number, method?: undefined): boolean;
    writeBytes(data: number | BigInt, length: number, method: Function): boolean;
    resize(newCapacity: number): void;
}
export {};
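
The declared write/read pairs are symmetric, so a round trip shows the intended use: values must be read back in exactly the order they were written. A sketch based only on these declarations, assuming a relative import from this lib directory:

import { Pickle } from './pickle';

// Write two primitives, serialize, then read them back in the same
// order, as the class comment in pickle.js requires.
const out = Pickle.createEmpty();
out.writeUInt32(42);
out.writeString('hello asar');
const buf: Buffer = out.toBuffer();

const inPickle = Pickle.createFromBuffer(buf);
const iter = inPickle.createIterator();
console.log(iter.readUInt32()); // 42
console.log(iter.readString()); // 'hello asar'
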
199  desktop-operator/node_modules/@electron/asar/lib/pickle.js  generated  vendored  Normal file
@@ -0,0 +1,199 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Pickle = void 0;
// sizeof(T).
const SIZE_INT32 = 4;
const SIZE_UINT32 = 4;
const SIZE_INT64 = 8;
const SIZE_UINT64 = 8;
const SIZE_FLOAT = 4;
const SIZE_DOUBLE = 8;
// The allocation granularity of the payload.
const PAYLOAD_UNIT = 64;
// Largest JS number.
const CAPACITY_READ_ONLY = 9007199254740992;
// Aligns 'i' by rounding it up to the next multiple of 'alignment'.
const alignInt = function (i, alignment) {
    return i + ((alignment - (i % alignment)) % alignment);
};
// PickleIterator reads data from a Pickle. The Pickle object must remain valid
// while the PickleIterator object is in use.
class PickleIterator {
    constructor(pickle) {
        this.payload = pickle.getHeader();
        this.payloadOffset = pickle.getHeaderSize();
        this.readIndex = 0;
        this.endIndex = pickle.getPayloadSize();
    }
    readBool() {
        return this.readInt() !== 0;
    }
    readInt() {
        return this.readBytes(SIZE_INT32, Buffer.prototype.readInt32LE);
    }
    readUInt32() {
        return this.readBytes(SIZE_UINT32, Buffer.prototype.readUInt32LE);
    }
    readInt64() {
        return this.readBytes(SIZE_INT64, Buffer.prototype.readBigInt64LE);
    }
    readUInt64() {
        return this.readBytes(SIZE_UINT64, Buffer.prototype.readBigUInt64LE);
    }
    readFloat() {
        return this.readBytes(SIZE_FLOAT, Buffer.prototype.readFloatLE);
    }
    readDouble() {
        return this.readBytes(SIZE_DOUBLE, Buffer.prototype.readDoubleLE);
    }
    readString() {
        return this.readBytes(this.readInt()).toString();
    }
    readBytes(length, method) {
        const readPayloadOffset = this.getReadPayloadOffsetAndAdvance(length);
        if (method != null) {
            return method.call(this.payload, readPayloadOffset, length);
        }
        else {
            return this.payload.slice(readPayloadOffset, readPayloadOffset + length);
        }
    }
    getReadPayloadOffsetAndAdvance(length) {
        if (length > this.endIndex - this.readIndex) {
            this.readIndex = this.endIndex;
            throw new Error('Failed to read data with length of ' + length);
        }
        const readPayloadOffset = this.payloadOffset + this.readIndex;
        this.advance(length);
        return readPayloadOffset;
    }
    advance(size) {
        const alignedSize = alignInt(size, SIZE_UINT32);
        if (this.endIndex - this.readIndex < alignedSize) {
            this.readIndex = this.endIndex;
        }
        else {
            this.readIndex += alignedSize;
        }
    }
}
// This class provides facilities for basic binary value packing and unpacking.
//
// The Pickle class supports appending primitive values (ints, strings, etc.)
// to a pickle instance. The Pickle instance grows its internal memory buffer
// dynamically to hold the sequence of primitive values. The internal memory
// buffer is exposed as the "data" of the Pickle. This "data" can be passed
// to a Pickle object to initialize it for reading.
//
// When reading from a Pickle object, it is important for the consumer to know
// what value types to read and in what order to read them as the Pickle does
// not keep track of the type of data written to it.
//
// The Pickle's data has a header which contains the size of the Pickle's
// payload. It can optionally support additional space in the header. That
// space is controlled by the header_size parameter passed to the Pickle
// constructor.
class Pickle {
    constructor(buffer) {
        if (buffer) {
            this.header = buffer;
            this.headerSize = buffer.length - this.getPayloadSize();
            this.capacityAfterHeader = CAPACITY_READ_ONLY;
            this.writeOffset = 0;
            if (this.headerSize > buffer.length) {
                this.headerSize = 0;
            }
            if (this.headerSize !== alignInt(this.headerSize, SIZE_UINT32)) {
                this.headerSize = 0;
            }
            if (this.headerSize === 0) {
                this.header = Buffer.alloc(0);
            }
        }
        else {
            this.header = Buffer.alloc(0);
            this.headerSize = SIZE_UINT32;
            this.capacityAfterHeader = 0;
            this.writeOffset = 0;
            this.resize(PAYLOAD_UNIT);
            this.setPayloadSize(0);
        }
    }
    static createEmpty() {
        return new Pickle();
    }
    static createFromBuffer(buffer) {
        return new Pickle(buffer);
    }
    getHeader() {
        return this.header;
    }
    getHeaderSize() {
        return this.headerSize;
    }
    createIterator() {
        return new PickleIterator(this);
    }
    toBuffer() {
        return this.header.slice(0, this.headerSize + this.getPayloadSize());
    }
    writeBool(value) {
        return this.writeInt(value ? 1 : 0);
    }
    writeInt(value) {
        return this.writeBytes(value, SIZE_INT32, Buffer.prototype.writeInt32LE);
    }
    writeUInt32(value) {
        return this.writeBytes(value, SIZE_UINT32, Buffer.prototype.writeUInt32LE);
    }
    writeInt64(value) {
        return this.writeBytes(BigInt(value), SIZE_INT64, Buffer.prototype.writeBigInt64LE);
    }
    writeUInt64(value) {
        return this.writeBytes(BigInt(value), SIZE_UINT64, Buffer.prototype.writeBigUInt64LE);
    }
    writeFloat(value) {
        return this.writeBytes(value, SIZE_FLOAT, Buffer.prototype.writeFloatLE);
    }
    writeDouble(value) {
        return this.writeBytes(value, SIZE_DOUBLE, Buffer.prototype.writeDoubleLE);
    }
    writeString(value) {
        const length = Buffer.byteLength(value, 'utf8');
        if (!this.writeInt(length)) {
            return false;
        }
        return this.writeBytes(value, length);
    }
    setPayloadSize(payloadSize) {
        return this.header.writeUInt32LE(payloadSize, 0);
    }
    getPayloadSize() {
        return this.header.readUInt32LE(0);
    }
    writeBytes(data, length, method) {
        const dataLength = alignInt(length, SIZE_UINT32);
        const newSize = this.writeOffset + dataLength;
        if (newSize > this.capacityAfterHeader) {
            this.resize(Math.max(this.capacityAfterHeader * 2, newSize));
        }
        if (method) {
            method.call(this.header, data, this.headerSize + this.writeOffset);
        }
        else {
            this.header.write(data, this.headerSize + this.writeOffset, length);
        }
        const endOffset = this.headerSize + this.writeOffset + length;
        this.header.fill(0, endOffset, endOffset + dataLength - length);
        this.setPayloadSize(newSize);
        this.writeOffset = newSize;
        return true;
    }
    resize(newCapacity) {
        newCapacity = alignInt(newCapacity, PAYLOAD_UNIT);
        this.header = Buffer.concat([this.header, Buffer.alloc(newCapacity)]);
        this.capacityAfterHeader = newCapacity;
    }
}
exports.Pickle = Pickle;
//# sourceMappingURL=pickle.js.map
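For orientation, a minimal usage sketch of the Pickle API above (an illustration, not part of the vendored file): values are written in order, the buffer is handed over via toBuffer(), and a PickleIterator reads them back in the same order, 4-byte aligned.

// Illustration only: round-trip a few primitives through a Pickle.
const { Pickle } = require('./pickle');

const pickle = Pickle.createEmpty();
pickle.writeInt(42);
pickle.writeString('hello');
pickle.writeBool(true);

// toBuffer() returns the header (payload size) plus the aligned payload.
const parsed = Pickle.createFromBuffer(pickle.toBuffer());
const it = parsed.createIterator();
console.log(it.readInt());    // 42
console.log(it.readString()); // 'hello'
console.log(it.readBool());   // true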
1
desktop-operator/node_modules/@electron/asar/lib/pickle.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
157
desktop-operator/node_modules/@electron/asar/lib/types/glob.d.ts
generated
vendored
Normal file
@@ -0,0 +1,157 @@
/**
 * TODO(erikian): remove this file once we upgrade to the latest `glob` version.
 * https://github.com/electron/asar/pull/332#issuecomment-2435407933
 */
interface IMinimatchOptions {
    /**
     * Dump a ton of stuff to stderr.
     *
     * @default false
     */
    debug?: boolean | undefined;
    /**
     * Do not expand `{a,b}` and `{1..3}` brace sets.
     *
     * @default false
     */
    nobrace?: boolean | undefined;
    /**
     * Disable `**` matching against multiple folder names.
     *
     * @default false
     */
    noglobstar?: boolean | undefined;
    /**
     * Allow patterns to match filenames starting with a period,
     * even if the pattern does not explicitly have a period in that spot.
     *
     * Note that by default, `'a/**' + '/b'` will **not** match `a/.d/b`, unless `dot` is set.
     *
     * @default false
     */
    dot?: boolean | undefined;
    /**
     * Disable "extglob" style patterns like `+(a|b)`.
     *
     * @default false
     */
    noext?: boolean | undefined;
    /**
     * Perform a case-insensitive match.
     *
     * @default false
     */
    nocase?: boolean | undefined;
    /**
     * When a match is not found by `minimatch.match`,
     * return a list containing the pattern itself if this option is set.
     * Otherwise, an empty list is returned if there are no matches.
     *
     * @default false
     */
    nonull?: boolean | undefined;
    /**
     * If set, then patterns without slashes will be matched
     * against the basename of the path if it contains slashes. For example,
     * `a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`.
     *
     * @default false
     */
    matchBase?: boolean | undefined;
    /**
     * Suppress the behavior of treating `#` at the start of a pattern as a comment.
     *
     * @default false
     */
    nocomment?: boolean | undefined;
    /**
     * Suppress the behavior of treating a leading `!` character as negation.
     *
     * @default false
     */
    nonegate?: boolean | undefined;
    /**
     * Returns from negate expressions the same as if they were not negated.
     * (Ie, true on a hit, false on a miss.)
     *
     * @default false
     */
    flipNegate?: boolean | undefined;
    /**
     * Compare a partial path to a pattern. As long as the parts of the path that
     * are present are not contradicted by the pattern, it will be treated as a
     * match. This is useful in applications where you're walking through a
     * folder structure, and don't yet have the full path, but want to ensure that
     * you do not walk down paths that can never be a match.
     *
     * @default false
     *
     * @example
     * import minimatch = require("minimatch");
     *
     * minimatch('/a/b', '/a/*' + '/c/d', { partial: true }) // true, might be /a/b/c/d
     * minimatch('/a/b', '/**' + '/d', { partial: true }) // true, might be /a/b/.../d
     * minimatch('/x/y/z', '/a/**' + '/z', { partial: true }) // false, because x !== a
     */
    partial?: boolean;
    /**
     * Use `\\` as a path separator _only_, and _never_ as an escape
     * character. If set, all `\\` characters are replaced with `/` in
     * the pattern. Note that this makes it **impossible** to match
     * against paths containing literal glob pattern characters, but
     * allows matching with patterns constructed using `path.join()` and
     * `path.resolve()` on Windows platforms, mimicking the (buggy!)
     * behavior of earlier versions on Windows. Please use with
     * caution, and be mindful of the caveat about Windows paths
     *
     * For legacy reasons, this is also set if
     * `options.allowWindowsEscape` is set to the exact value `false`.
     *
     * @default false
     */
    windowsPathsNoEscape?: boolean;
}
export interface IOptions extends IMinimatchOptions {
    cwd?: string | undefined;
    root?: string | undefined;
    dot?: boolean | undefined;
    nomount?: boolean | undefined;
    mark?: boolean | undefined;
    nosort?: boolean | undefined;
    stat?: boolean | undefined;
    silent?: boolean | undefined;
    strict?: boolean | undefined;
    cache?: {
        [path: string]: boolean | 'DIR' | 'FILE' | ReadonlyArray<string>;
    } | undefined;
    statCache?: {
        [path: string]: false | {
            isDirectory(): boolean;
        } | undefined;
    } | undefined;
    symlinks?: {
        [path: string]: boolean | undefined;
    } | undefined;
    realpathCache?: {
        [path: string]: string;
    } | undefined;
    sync?: boolean | undefined;
    nounique?: boolean | undefined;
    nonull?: boolean | undefined;
    debug?: boolean | undefined;
    nobrace?: boolean | undefined;
    noglobstar?: boolean | undefined;
    noext?: boolean | undefined;
    nocase?: boolean | undefined;
    matchBase?: any;
    nodir?: boolean | undefined;
    ignore?: string | ReadonlyArray<string> | undefined;
    follow?: boolean | undefined;
    realpath?: boolean | undefined;
    nonegate?: boolean | undefined;
    nocomment?: boolean | undefined;
    absolute?: boolean | undefined;
    allowWindowsEscape?: boolean | undefined;
    fs?: typeof import('fs');
}
export {};
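The TODO above notes these typings are a stopgap copy of glob v7's option bag. A hedged sketch of how such an options object is typically consumed (illustration only; assumes the glob v7-style callback API, and the pattern and paths are made up):

// Illustration only, assuming a glob v7-style API.
const glob = require('glob');

glob('**/*.js', { cwd: '/path/to/app', dot: true, nodir: true, ignore: ['node_modules/**'] }, (err, files) => {
    if (err) throw err;
    console.log(files); // relative paths; dotfiles included, directories excluded
});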
3
desktop-operator/node_modules/@electron/asar/lib/types/glob.js
generated
vendored
Normal file
@@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=glob.js.map
1
desktop-operator/node_modules/@electron/asar/lib/types/glob.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"glob.js","sourceRoot":"","sources":["../../src/types/glob.ts"],"names":[],"mappings":""}
13
desktop-operator/node_modules/@electron/asar/lib/wrapped-fs.d.ts
generated
vendored
Normal file
@@ -0,0 +1,13 @@
type AsarFS = typeof import('fs') & {
    mkdirp(dir: string): Promise<void>;
    mkdirpSync(dir: string): void;
    lstat: (typeof import('fs'))['promises']['lstat'];
    mkdtemp: (typeof import('fs'))['promises']['mkdtemp'];
    readFile: (typeof import('fs'))['promises']['readFile'];
    stat: (typeof import('fs'))['promises']['stat'];
    writeFile: (typeof import('fs'))['promises']['writeFile'];
    symlink: (typeof import('fs'))['promises']['symlink'];
    readlink: (typeof import('fs'))['promises']['readlink'];
};
declare const promisified: AsarFS;
export default promisified;
26
desktop-operator/node_modules/@electron/asar/lib/wrapped-fs.js
generated
vendored
Normal file
@@ -0,0 +1,26 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const fs = 'electron' in process.versions ? require('original-fs') : require('fs');
const promisifiedMethods = [
    'lstat',
    'mkdtemp',
    'readFile',
    'stat',
    'writeFile',
    'symlink',
    'readlink',
];
const promisified = {};
for (const method of Object.keys(fs)) {
    if (promisifiedMethods.includes(method)) {
        promisified[method] = fs.promises[method];
    }
    else {
        promisified[method] = fs[method];
    }
}
// To make it more like fs-extra
promisified.mkdirp = (dir) => fs.promises.mkdir(dir, { recursive: true });
promisified.mkdirpSync = (dir) => fs.mkdirSync(dir, { recursive: true });
exports.default = promisified;
//# sourceMappingURL=wrapped-fs.js.map
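A minimal usage sketch of the wrapper above (illustration only; the paths are made up): the methods listed in promisifiedMethods come from fs.promises and return promises, everything else on the fs module passes through unchanged, and mkdirp/mkdirpSync emulate fs-extra.

// Illustration only: mixing promisified and pass-through methods.
const fs = require('./wrapped-fs').default;

(async () => {
    await fs.mkdirp('/tmp/asar-demo/nested');                  // fs-extra-style recursive mkdir
    await fs.writeFile('/tmp/asar-demo/nested/a.txt', 'hi');   // from fs.promises
    console.log(fs.existsSync('/tmp/asar-demo/nested/a.txt')); // pass-through sync method
})();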
1
desktop-operator/node_modules/@electron/asar/lib/wrapped-fs.js.map
generated
vendored
Normal file
@@ -0,0 +1 @@
{"version":3,"file":"wrapped-fs.js","sourceRoot":"","sources":["../src/wrapped-fs.ts"],"names":[],"mappings":";;AAAA,MAAM,EAAE,GAAG,UAAU,IAAI,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,aAAa,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AAEnF,MAAM,kBAAkB,GAAG;IACzB,OAAO;IACP,SAAS;IACT,UAAU;IACV,MAAM;IACN,WAAW;IACX,SAAS;IACT,UAAU;CACX,CAAC;AAcF,MAAM,WAAW,GAAG,EAAY,CAAC;AAEjC,KAAK,MAAM,MAAM,IAAI,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,CAAC;IACrC,IAAI,kBAAkB,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CAAC;QACvC,WAAmB,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;IACrD,CAAC;SAAM,CAAC;QACL,WAAmB,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,MAAM,CAAC,CAAC;IAC5C,CAAC;AACH,CAAC;AACD,gCAAgC;AAChC,WAAW,CAAC,MAAM,GAAG,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,KAAK,CAAC,GAAG,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;AAC1E,WAAW,CAAC,UAAU,GAAG,CAAC,GAAG,EAAE,EAAE,CAAC,EAAE,CAAC,SAAS,CAAC,GAAG,EAAE,EAAE,SAAS,EAAE,IAAI,EAAE,CAAC,CAAC;AAEzE,kBAAe,WAAW,CAAC"}