2025-01-04 00:34:03 +01:00
parent 41829408dc
commit 0ca14bbc19
18111 changed files with 1871397 additions and 0 deletions

resources/app/node_modules/unzipper/lib/BufferStream.js generated vendored Normal file

@@ -0,0 +1,19 @@
const Stream = require('stream');
module.exports = function(entry) {
return new Promise(function(resolve, reject) {
const chunks = [];
const bufferStream = Stream.Transform()
.on('finish', function() {
resolve(Buffer.concat(chunks));
})
.on('error', reject);
bufferStream._transform = function(d, e, cb) {
chunks.push(d);
cb();
};
entry.on('error', reject)
.pipe(bufferStream);
});
};
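
For reference, a minimal usage sketch (not part of the commit; the file name is a placeholder): the module buffers an entire readable stream and resolves with its contents.

const BufferStream = require('./BufferStream');
const fs = require('fs');

// Collect a whole stream into a single Buffer.
BufferStream(fs.createReadStream('archive.zip'))
  .then(function(buf) { console.log('buffered', buf.length, 'bytes'); })
  .catch(console.error);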

resources/app/node_modules/unzipper/lib/Decrypt.js generated vendored Normal file

@@ -0,0 +1,94 @@
const Int64 = require("node-int64");
let Stream = require("stream");
// Backwards compatibility for node versions < 8
if (!Stream.Writable || !Stream.Writable.prototype.destroy)
Stream = require("readable-stream");
let table;
function generateTable() {
const poly = 0xEDB88320;
let c, n, k;
table = [];
for (n = 0; n < 256; n++) {
c = n;
for (k = 0; k < 8; k++) c = (c & 1) ? (poly ^ (c >>> 1)) : (c >>> 1);
table[n] = c >>> 0;
}
}
function crc(ch, crc) {
if (!table) generateTable();
if (ch.charCodeAt) ch = ch.charCodeAt(0);
const l = (crc.readUInt32BE() >> 8) & 0xffffff;
const r = table[(crc.readUInt32BE() ^ (ch >>> 0)) & 0xff];
return (l ^ r) >>> 0;
}
function multiply(a, b) {
const ah = (a >> 16) & 0xffff;
const al = a & 0xffff;
const bh = (b >> 16) & 0xffff;
const bl = b & 0xffff;
const high = (ah * bl + al * bh) & 0xffff;
return ((high << 16) >>> 0) + al * bl;
}
function Decrypt() {
if (!(this instanceof Decrypt)) return new Decrypt();
this.key0 = Buffer.allocUnsafe(4);
this.key1 = Buffer.allocUnsafe(4);
this.key2 = Buffer.allocUnsafe(4);
this.key0.writeUInt32BE(0x12345678, 0);
this.key1.writeUInt32BE(0x23456789, 0);
this.key2.writeUInt32BE(0x34567890, 0);
}
Decrypt.prototype.update = function (h) {
this.key0.writeUInt32BE(crc(h, this.key0));
this.key1.writeUInt32BE(
((this.key0.readUInt32BE() & 0xff) +
this.key1.readUInt32BE()) >>> 0
);
const x = new Int64(
(multiply(this.key1.readUInt32BE(), 134775813) + 1) & 0xFFFFFFFF
);
const b = Buffer.alloc(8);
x.copy(b, 0);
b.copy(this.key1, 0, 4, 8);
this.key2.writeUInt32BE(
crc(((this.key1.readUInt32BE() >> 24) & 0xff) >>> 0, this.key2)
);
};
Decrypt.prototype.decryptByte = function (c) {
const k = (this.key2.readUInt32BE() | 2) >>> 0;
c = c ^ ((multiply(k, (k ^ 1)) >> 8) & 0xff);
this.update(c);
return c;
};
Decrypt.prototype.stream = function () {
const stream = Stream.Transform(),
self = this;
stream._transform = function (d, e, cb) {
for (let i = 0; i < d.length; i++) {
d[i] = self.decryptByte(d[i]);
}
this.push(d);
cb();
};
return stream;
};
module.exports = Decrypt;
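
A sketch of how these primitives fit together (mirrors the call sites in Open/unzip.js; password and ciphertext are placeholders): the three keys are primed with the password one character at a time, then each ciphertext byte is decrypted and fed back into the key schedule.

const Decrypt = require('./Decrypt');

const decrypt = Decrypt();
// Prime key0/key1/key2 with the password, one character at a time.
'secret'.split('').forEach(function(ch) { decrypt.update(ch); });
// Decrypt a buffer in place; decryptByte() also updates the keys.
const data = Buffer.from([0x8a, 0x3f, 0x10]); // placeholder ciphertext
for (let i = 0; i < data.length; i++) data[i] = decrypt.decryptByte(data[i]);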

resources/app/node_modules/unzipper/lib/NoopStream.js generated vendored Normal file

@@ -0,0 +1,14 @@
const Stream = require('stream');
const util = require('util');
function NoopStream() {
if (!(this instanceof NoopStream)) {
return new NoopStream();
}
Stream.Transform.call(this);
}
util.inherits(NoopStream, Stream.Transform);
NoopStream.prototype._transform = function(d, e, cb) { cb(); };
module.exports = NoopStream;
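
A one-line usage sketch (hypothetical source stream): pipe to NoopStream to consume and discard a stream's bytes while keeping backpressure intact, as parse.js does in entry.autodrain().

const NoopStream = require('./NoopStream');
const fs = require('fs');

fs.createReadStream('archive.zip').pipe(NoopStream()); // bytes are read and dropped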

resources/app/node_modules/unzipper/lib/Open/directory.js generated vendored Normal file

@@ -0,0 +1,243 @@
const PullStream = require('../PullStream');
const unzip = require('./unzip');
const BufferStream = require('../BufferStream');
const parseExtraField = require('../parseExtraField');
const path = require('path');
const fs = require('fs-extra');
const parseDateTime = require('../parseDateTime');
const parseBuffer = require('../parseBuffer');
const Bluebird = require('bluebird');
const signature = Buffer.alloc(4);
signature.writeUInt32LE(0x06054b50, 0);
function getCrxHeader(source) {
const sourceStream = source.stream(0).pipe(PullStream());
return sourceStream.pull(4).then(function(data) {
const signature = data.readUInt32LE(0);
if (signature === 0x34327243) {
let crxHeader;
return sourceStream.pull(12).then(function(data) {
crxHeader = parseBuffer.parse(data, [
['version', 4],
['pubKeyLength', 4],
['signatureLength', 4],
]);
}).then(function() {
return sourceStream.pull(crxHeader.pubKeyLength + crxHeader.signatureLength);
}).then(function(data) {
crxHeader.publicKey = data.slice(0, crxHeader.pubKeyLength);
crxHeader.signature = data.slice(crxHeader.pubKeyLength);
crxHeader.size = 16 + crxHeader.pubKeyLength + crxHeader.signatureLength;
return crxHeader;
});
}
});
}
// Zip64 File Format Notes: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
function getZip64CentralDirectory(source, zip64CDL) {
const d64loc = parseBuffer.parse(zip64CDL, [
['signature', 4],
['diskNumber', 4],
['offsetToStartOfCentralDirectory', 8],
['numberOfDisks', 4],
]);
if (d64loc.signature != 0x07064b50) {
throw new Error('invalid zip64 end of central dir locator signature (0x07064b50): 0x' + d64loc.signature.toString(16));
}
const dir64 = PullStream();
source.stream(d64loc.offsetToStartOfCentralDirectory).pipe(dir64);
return dir64.pull(56);
}
// Zip64 File Format Notes: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
function parseZip64DirRecord (dir64record) {
const vars = parseBuffer.parse(dir64record, [
['signature', 4],
['sizeOfZip64Record', 8], // size of this zip64 EOCD record itself; the central directory size follows below
['version', 2],
['versionsNeededToExtract', 2],
['diskNumber', 4],
['diskStart', 4],
['numberOfRecordsOnDisk', 8],
['numberOfRecords', 8],
['sizeOfCentralDirectory', 8],
['offsetToStartOfCentralDirectory', 8],
]);
if (vars.signature != 0x06064b50) {
throw new Error('invalid zip64 end of central dir record signature (0x06064b50): 0x' + vars.signature.toString(16));
}
return vars;
}
module.exports = function centralDirectory(source, options) {
const endDir = PullStream();
const records = PullStream();
const tailSize = (options && options.tailSize) || 80;
let sourceSize,
crxHeader,
startOffset,
vars;
if (options && options.crx)
crxHeader = getCrxHeader(source);
return source.size()
.then(function(size) {
sourceSize = size;
source.stream(Math.max(0, size-tailSize))
.on('error', function (error) { endDir.emit('error', error); })
.pipe(endDir);
return endDir.pull(signature);
})
.then(function() {
return Bluebird.props({directory: endDir.pull(22), crxHeader: crxHeader});
})
.then(function(d) {
const data = d.directory;
startOffset = d.crxHeader && d.crxHeader.size || 0;
vars = parseBuffer.parse(data, [
['signature', 4],
['diskNumber', 2],
['diskStart', 2],
['numberOfRecordsOnDisk', 2],
['numberOfRecords', 2],
['sizeOfCentralDirectory', 4],
['offsetToStartOfCentralDirectory', 4],
['commentLength', 2],
]);
// Is this zip file using zip64 format? Use same check as Go:
// https://github.com/golang/go/blob/master/src/archive/zip/reader.go#L503
// For zip64 files, need to find zip64 central directory locator header to extract
// relative offset for zip64 central directory record.
if (vars.diskNumber == 0xffff || vars.numberOfRecords == 0xffff ||
vars.offsetToStartOfCentralDirectory == 0xffffffff) {
// Offset to zip64 CDL is 20 bytes before normal CDR
const zip64CDLSize = 20;
const zip64CDLOffset = sourceSize - (tailSize - endDir.match + zip64CDLSize);
const zip64CDLStream = PullStream();
source.stream(zip64CDLOffset).pipe(zip64CDLStream);
return zip64CDLStream.pull(zip64CDLSize)
.then(function (d) { return getZip64CentralDirectory(source, d); })
.then(function (dir64record) {
vars = parseZip64DirRecord(dir64record);
});
} else {
vars.offsetToStartOfCentralDirectory += startOffset;
}
})
.then(function() {
if (vars.commentLength) return endDir.pull(vars.commentLength).then(function(comment) {
vars.comment = comment.toString('utf8');
});
})
.then(function() {
source.stream(vars.offsetToStartOfCentralDirectory).pipe(records);
vars.extract = function(opts) {
if (!opts || !opts.path) throw new Error('PATH_MISSING');
// make sure path is normalized before using it
opts.path = path.resolve(path.normalize(opts.path));
return vars.files.then(function(files) {
return Bluebird.map(files, async function(entry) {
// to avoid zip slip (writing outside of the destination), we resolve
// the target path, and make sure it's nested in the intended
// destination, or not extract it otherwise.
const extractPath = path.join(opts.path, entry.path);
if (extractPath.indexOf(opts.path) != 0) {
return;
}
if (entry.type == 'Directory') {
await fs.ensureDir(extractPath);
return;
}
await fs.ensureDir(path.dirname(extractPath));
const writer = opts.getWriter ? opts.getWriter({path: extractPath}) : fs.createWriteStream(extractPath);
return new Promise(function(resolve, reject) {
entry.stream(opts.password)
.on('error', reject)
.pipe(writer)
.on('close', resolve)
.on('error', reject);
});
}, { concurrency: opts.concurrency > 1 ? opts.concurrency : 1 });
});
};
vars.files = Bluebird.mapSeries(Array(vars.numberOfRecords), function() {
return records.pull(46).then(function(data) {
const vars = parseBuffer.parse(data, [
['signature', 4],
['versionMadeBy', 2],
['versionsNeededToExtract', 2],
['flags', 2],
['compressionMethod', 2],
['lastModifiedTime', 2],
['lastModifiedDate', 2],
['crc32', 4],
['compressedSize', 4],
['uncompressedSize', 4],
['fileNameLength', 2],
['extraFieldLength', 2],
['fileCommentLength', 2],
['diskNumber', 2],
['internalFileAttributes', 2],
['externalFileAttributes', 4],
['offsetToLocalFileHeader', 4],
]);
vars.offsetToLocalFileHeader += startOffset;
vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);
return records.pull(vars.fileNameLength).then(function(fileNameBuffer) {
vars.pathBuffer = fileNameBuffer;
vars.path = fileNameBuffer.toString('utf8');
vars.isUnicode = (vars.flags & 0x800) != 0;
return records.pull(vars.extraFieldLength);
})
.then(function(extraField) {
vars.extra = parseExtraField(extraField, vars);
return records.pull(vars.fileCommentLength);
})
.then(function(comment) {
vars.comment = comment;
vars.type = (vars.uncompressedSize === 0 && /[/\\]$/.test(vars.path)) ? 'Directory' : 'File';
const padding = options && options.padding || 1000;
vars.stream = function(_password) {
const totalSize = 30
+ padding // add an extra buffer
+ (vars.extraFieldLength || 0)
+ (vars.fileNameLength || 0)
+ vars.compressedSize;
return unzip(source, vars.offsetToLocalFileHeader, _password, vars, totalSize);
};
vars.buffer = function(_password) {
return BufferStream(vars.stream(_password));
};
return vars;
});
});
});
return Bluebird.props(vars);
});
};
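
A worked example of the zip64 locator arithmetic above (all sizes hypothetical): endDir.match records where the EOCD signature was found inside the tail window, and the locator sits exactly zip64CDLSize bytes before the EOCD record.

// Hypothetical numbers illustrating the 'zip64CDLOffset' computation above.
const sourceSize = 100000; // total archive size in bytes
const tailSize = 80;       // bytes streamed from the end of the file
const match = 38;          // endDir.match: EOCD signature offset within the tail
const zip64CDLSize = 20;   // fixed size of the zip64 EOCD locator

const eocdOffset = sourceSize - tailSize + match;              // 99958
const zip64CDLOffset = sourceSize - (tailSize - match + zip64CDLSize);
console.log(zip64CDLOffset);                                   // 99938
console.log(zip64CDLOffset + zip64CDLSize === eocdOffset);     // true: locator ends where the EOCD begins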

resources/app/node_modules/unzipper/lib/Open/index.js generated vendored Normal file

@@ -0,0 +1,141 @@
const fs = require('graceful-fs');
const directory = require('./directory');
const Stream = require('stream');
module.exports = {
buffer: function(buffer, options) {
const source = {
stream: function(offset, length) {
const stream = Stream.PassThrough();
const end = length ? offset + length : undefined;
stream.end(buffer.slice(offset, end));
return stream;
},
size: function() {
return Promise.resolve(buffer.length);
}
};
return directory(source, options);
},
file: function(filename, options) {
const source = {
stream: function(start, length) {
const end = length ? start + length : undefined;
return fs.createReadStream(filename, {start, end});
},
size: function() {
return new Promise(function(resolve, reject) {
fs.stat(filename, function(err, d) {
if (err)
reject(err);
else
resolve(d.size);
});
});
}
};
return directory(source, options);
},
url: function(request, params, options) {
if (typeof params === 'string')
params = {url: params};
if (!params.url)
throw new Error('URL missing');
params.headers = params.headers || {};
const source = {
stream : function(offset, length) {
const options = Object.create(params);
const end = length ? offset + length : '';
options.headers = Object.create(params.headers);
options.headers.range = 'bytes='+offset+'-' + end;
return request(options);
},
size: function() {
return new Promise(function(resolve, reject) {
const req = request(params);
req.on('response', function(d) {
req.abort();
if (!d.headers['content-length'])
reject(new Error('Missing content length header'));
else
resolve(d.headers['content-length']);
}).on('error', reject);
});
}
};
return directory(source, options);
},
s3 : function(client, params, options) {
const source = {
size: function() {
return new Promise(function(resolve, reject) {
client.headObject(params, function(err, d) {
if (err)
reject(err);
else
resolve(d.ContentLength);
});
});
},
stream: function(offset, length) {
const d = {};
for (const key in params)
d[key] = params[key];
const end = length ? offset + length : '';
d.Range = 'bytes='+offset+'-' + end;
return client.getObject(d).createReadStream();
}
};
return directory(source, options);
},
s3_v3: function (client, params, options) {
//@ts-ignore
const { GetObjectCommand, HeadObjectCommand } = require('@aws-sdk/client-s3');
const source = {
size: async () => {
const head = await client.send(
new HeadObjectCommand({
Bucket: params.Bucket,
Key: params.Key,
})
);
if (!head.ContentLength) {
return 0;
}
return head.ContentLength;
},
stream: (offset, length) => {
const stream = Stream.PassThrough();
const end = length ? offset + length : "";
client
.send(
new GetObjectCommand({
Bucket: params.Bucket,
Key: params.Key,
Range: `bytes=${offset}-${end}`,
})
)
.then((response) => {
response.Body.pipe(stream);
})
.catch((error) => {
stream.emit("error", error);
});
return stream;
},
};
return directory(source, options);
},
custom: function(source, options) {
return directory(source, options);
}
};
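
Typical usage of this module through the package's Open export (paths are placeholders): list the central directory without reading file bodies, then extract on demand.

const unzipper = require('unzipper'); // package entry point re-exporting Open

async function main() {
  const directory = await unzipper.Open.file('archive.zip');
  console.log(directory.files.map(function(f) { return f.path; }));
  await directory.extract({ path: './output', concurrency: 4 });
}
main().catch(console.error);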

resources/app/node_modules/unzipper/lib/Open/unzip.js generated vendored Normal file

@@ -0,0 +1,120 @@
const Decrypt = require('../Decrypt');
const PullStream = require('../PullStream');
const Stream = require('stream');
const zlib = require('zlib');
const parseExtraField = require('../parseExtraField');
const parseDateTime = require('../parseDateTime');
const parseBuffer = require('../parseBuffer');
module.exports = function unzip(source, offset, _password, directoryVars, length) {
const file = PullStream(),
entry = Stream.PassThrough();
const req = source.stream(offset, length);
req.pipe(file).on('error', function(e) {
entry.emit('error', e);
});
entry.vars = file.pull(30)
.then(function(data) {
let vars = parseBuffer.parse(data, [
['signature', 4],
['versionsNeededToExtract', 2],
['flags', 2],
['compressionMethod', 2],
['lastModifiedTime', 2],
['lastModifiedDate', 2],
['crc32', 4],
['compressedSize', 4],
['uncompressedSize', 4],
['fileNameLength', 2],
['extraFieldLength', 2],
]);
vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);
return file.pull(vars.fileNameLength)
.then(function(fileName) {
vars.fileName = fileName.toString('utf8');
return file.pull(vars.extraFieldLength);
})
.then(function(extraField) {
let checkEncryption;
vars.extra = parseExtraField(extraField, vars);
// Ignore local file header vars if the directory vars are available
if (directoryVars && directoryVars.compressedSize) vars = directoryVars;
if (vars.flags & 0x01) checkEncryption = file.pull(12)
.then(function(header) {
if (!_password)
throw new Error('MISSING_PASSWORD');
const decrypt = Decrypt();
String(_password).split('').forEach(function(d) {
decrypt.update(d);
});
for (let i = 0; i < header.length; i++)
header[i] = decrypt.decryptByte(header[i]);
vars.decrypt = decrypt;
vars.compressedSize -= 12;
const check = (vars.flags & 0x8) ? (vars.lastModifiedTime >> 8) & 0xff : (vars.crc32 >> 24) & 0xff;
if (header[11] !== check)
throw new Error('BAD_PASSWORD');
return vars;
});
return Promise.resolve(checkEncryption)
.then(function() {
entry.emit('vars', vars);
return vars;
});
});
});
entry.vars.then(function(vars) {
const fileSizeKnown = !(vars.flags & 0x08) || vars.compressedSize > 0;
let eof;
const inflater = vars.compressionMethod ? zlib.createInflateRaw() : Stream.PassThrough();
if (fileSizeKnown) {
entry.size = vars.uncompressedSize;
eof = vars.compressedSize;
} else {
eof = Buffer.alloc(4);
eof.writeUInt32LE(0x08074b50, 0);
}
let stream = file.stream(eof);
if (vars.decrypt)
stream = stream.pipe(vars.decrypt.stream());
stream
.pipe(inflater)
.on('error', function(err) { entry.emit('error', err);})
.pipe(entry)
.on('finish', function() {
if(req.destroy)
req.destroy();
else if (req.abort)
req.abort();
else if (req.close)
req.close();
else if (req.push)
req.push();
else
console.log('warning - unable to close stream');
});
})
.catch(function(e) {
entry.emit('error', e);
});
return entry;
};
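
unzip() is not called directly by consumers; it backs entry.stream(password) created in Open/directory.js. A sketch (archive name and password are placeholders):

const fs = require('fs');
const unzipper = require('unzipper');

unzipper.Open.file('secured.zip').then(function(directory) {
  directory.files[0]
    .stream('secret')                 // emits 'error' with MISSING_PASSWORD or BAD_PASSWORD
    .on('error', console.error)
    .pipe(fs.createWriteStream('first-entry.bin'));
});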

resources/app/node_modules/unzipper/lib/PullStream.js generated vendored Normal file

@@ -0,0 +1,139 @@
const Stream = require('stream');
const util = require('util');
const strFunction = 'function';
function PullStream() {
if (!(this instanceof PullStream))
return new PullStream();
Stream.Duplex.call(this, {decodeStrings:false, objectMode:true});
this.buffer = Buffer.from('');
const self = this;
self.on('finish', function() {
self.finished = true;
self.emit('chunk', false);
});
}
util.inherits(PullStream, Stream.Duplex);
PullStream.prototype._write = function(chunk, e, cb) {
this.buffer = Buffer.concat([this.buffer, chunk]);
this.cb = cb;
this.emit('chunk');
};
// The `eof` parameter is interpreted as `file_length` if the type is number
// otherwise (i.e. buffer) it is interpreted as a pattern signaling end of stream
PullStream.prototype.stream = function(eof, includeEof) {
const p = Stream.PassThrough();
let done;
const self = this;
function cb() {
if (typeof self.cb === strFunction) {
const callback = self.cb;
self.cb = undefined;
return callback();
}
}
function pull() {
let packet;
if (self.buffer && self.buffer.length) {
if (typeof eof === 'number') {
packet = self.buffer.slice(0, eof);
self.buffer = self.buffer.slice(eof);
eof -= packet.length;
done = done || !eof;
} else {
let match = self.buffer.indexOf(eof);
if (match !== -1) {
// store signature match byte offset to allow us to reference
// this for zip64 offset
self.match = match;
if (includeEof) match = match + eof.length;
packet = self.buffer.slice(0, match);
self.buffer = self.buffer.slice(match);
done = true;
} else {
const len = self.buffer.length - eof.length;
if (len <= 0) {
cb();
} else {
packet = self.buffer.slice(0, len);
self.buffer = self.buffer.slice(len);
}
}
}
if (packet) p.write(packet, function() {
if (self.buffer.length === 0 || (eof.length && self.buffer.length <= eof.length)) cb();
});
}
if (!done) {
if (self.finished) {
self.removeListener('chunk', pull);
self.emit('error', new Error('FILE_ENDED'));
return;
}
} else {
self.removeListener('chunk', pull);
p.end();
}
}
self.on('chunk', pull);
pull();
return p;
};
PullStream.prototype.pull = function(eof, includeEof) {
if (eof === 0) return Promise.resolve(Buffer.alloc(0)); // empty pull resolves with an empty buffer
// If we already have the required data in buffer
// we can resolve the request immediately
if (!isNaN(eof) && this.buffer.length > eof) {
const data = this.buffer.slice(0, eof);
this.buffer = this.buffer.slice(eof);
return Promise.resolve(data);
}
// Otherwise we stream until we have it
let buffer = Buffer.from('');
const self = this;
const concatStream = new Stream.Transform();
concatStream._transform = function(d, e, cb) {
buffer = Buffer.concat([buffer, d]);
cb();
};
let rejectHandler;
let pullStreamRejectHandler;
return new Promise(function(resolve, reject) {
rejectHandler = reject;
pullStreamRejectHandler = function(e) {
self.__emittedError = e;
reject(e);
};
if (self.finished)
return reject(new Error('FILE_ENDED'));
self.once('error', pullStreamRejectHandler); // reject any errors from pullstream itself
self.stream(eof, includeEof)
.on('error', reject)
.pipe(concatStream)
.on('finish', function() {resolve(buffer);})
.on('error', reject);
})
.finally(function() {
self.removeListener('error', rejectHandler);
self.removeListener('error', pullStreamRejectHandler);
});
};
PullStream.prototype._read = function(){};
module.exports = PullStream;
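
A minimal sketch of the pull/stream contract (file name is a placeholder): write a readable into the PullStream, then pull fixed-size chunks as promises.

const PullStream = require('./PullStream');
const fs = require('fs');

const ps = PullStream();
fs.createReadStream('archive.zip').pipe(ps);

ps.pull(4)
  .then(function(sig) {
    console.log('signature: 0x' + sig.readUInt32LE(0).toString(16));
    return ps.pull(26); // e.g. the rest of a local file header
  })
  .then(function(header) { console.log('pulled', header.length, 'bytes'); })
  .catch(console.error);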

resources/app/node_modules/unzipper/lib/extract.js generated vendored Normal file

@@ -0,0 +1,62 @@
module.exports = Extract;
const Parse = require('./parse');
const fs = require('fs-extra');
const path = require('path');
const stream = require('stream');
const duplexer2 = require('duplexer2');
function Extract (opts) {
// make sure path is normalized before using it
opts.path = path.resolve(path.normalize(opts.path));
const parser = new Parse(opts);
const outStream = new stream.Writable({objectMode: true});
outStream._write = async function(entry, encoding, cb) {
// to avoid zip slip (writing outside of the destination), we resolve
// the target path, and make sure it's nested in the intended
// destination, or not extract it otherwise.
// NOTE: Need to normalize to forward slashes for UNIX OS's to properly
// ignore the zip slipped file entirely
const extractPath = path.join(opts.path, entry.path.replace(/\\/g, '/'));
if (extractPath.indexOf(opts.path) != 0) {
return cb();
}
if (entry.type == 'Directory') {
await fs.ensureDir(extractPath);
return cb();
}
await fs.ensureDir(path.dirname(extractPath));
const writer = opts.getWriter ? opts.getWriter({path: extractPath}) : fs.createWriteStream(extractPath);
entry.pipe(writer)
.on('error', cb)
.on('close', cb);
};
const extract = duplexer2(parser, outStream);
parser.once('crx-header', function(crxHeader) {
extract.crxHeader = crxHeader;
});
parser
.pipe(outStream)
.on('finish', function() {
extract.emit('close');
});
extract.promise = function() {
return new Promise(function(resolve, reject) {
extract.on('close', resolve);
extract.on('error', reject);
});
};
return extract;
}
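
Typical usage via the package export (paths are placeholders): pipe an archive into Extract and wait for the close event through promise().

const fs = require('fs');
const unzipper = require('unzipper');

fs.createReadStream('archive.zip')
  .pipe(unzipper.Extract({ path: './output' }))
  .promise()
  .then(function() { console.log('extracted'); }, console.error);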

resources/app/node_modules/unzipper/lib/parse.js generated vendored Normal file

@@ -0,0 +1,288 @@
const util = require('util');
const zlib = require('zlib');
const Stream = require('stream');
const PullStream = require('./PullStream');
const NoopStream = require('./NoopStream');
const BufferStream = require('./BufferStream');
const parseExtraField = require('./parseExtraField');
const parseDateTime = require('./parseDateTime');
const pipeline = Stream.pipeline;
const parseBuffer = require('./parseBuffer');
const endDirectorySignature = Buffer.alloc(4);
endDirectorySignature.writeUInt32LE(0x06054b50, 0);
function Parse(opts) {
if (!(this instanceof Parse)) {
return new Parse(opts);
}
const self = this;
self._opts = opts || { verbose: false };
PullStream.call(self, self._opts);
self.on('finish', function() {
self.emit('end');
self.emit('close');
});
self._readRecord().catch(function(e) {
if (!self.__emittedError || self.__emittedError !== e)
self.emit('error', e);
});
}
util.inherits(Parse, PullStream);
Parse.prototype._readRecord = function () {
const self = this;
return self.pull(4).then(function(data) {
if (data.length === 0)
return;
const signature = data.readUInt32LE(0);
if (signature === 0x34327243) {
return self._readCrxHeader();
}
if (signature === 0x04034b50) {
return self._readFile();
}
else if (signature === 0x02014b50) {
self.reachedCD = true;
return self._readCentralDirectoryFileHeader();
}
else if (signature === 0x06054b50) {
return self._readEndOfCentralDirectoryRecord();
}
else if (self.reachedCD) {
// _readEndOfCentralDirectoryRecord expects the EOCD
// signature to be consumed so set includeEof=true
const includeEof = true;
return self.pull(endDirectorySignature, includeEof).then(function() {
return self._readEndOfCentralDirectoryRecord();
});
}
else
self.emit('error', new Error('invalid signature: 0x' + signature.toString(16)));
}).then(function(loop) {
if (loop) {
return self._readRecord();
}
});
};
Parse.prototype._readCrxHeader = function() {
const self = this;
return self.pull(12).then(function(data) {
self.crxHeader = parseBuffer.parse(data, [
['version', 4],
['pubKeyLength', 4],
['signatureLength', 4],
]);
return self.pull(self.crxHeader.pubKeyLength + self.crxHeader.signatureLength);
}).then(function(data) {
self.crxHeader.publicKey = data.slice(0, self.crxHeader.pubKeyLength);
self.crxHeader.signature = data.slice(self.crxHeader.pubKeyLength);
self.emit('crx-header', self.crxHeader);
return true;
});
};
Parse.prototype._readFile = function () {
const self = this;
return self.pull(26).then(function(data) {
const vars = parseBuffer.parse(data, [
['versionsNeededToExtract', 2],
['flags', 2],
['compressionMethod', 2],
['lastModifiedTime', 2],
['lastModifiedDate', 2],
['crc32', 4],
['compressedSize', 4],
['uncompressedSize', 4],
['fileNameLength', 2],
['extraFieldLength', 2],
]);
vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);
if (self.crxHeader) vars.crxHeader = self.crxHeader;
return self.pull(vars.fileNameLength).then(function(fileNameBuffer) {
const fileName = fileNameBuffer.toString('utf8');
const entry = Stream.PassThrough();
let __autodraining = false;
entry.autodrain = function() {
__autodraining = true;
const draining = entry.pipe(NoopStream());
draining.promise = function() {
return new Promise(function(resolve, reject) {
draining.on('finish', resolve);
draining.on('error', reject);
});
};
return draining;
};
entry.buffer = function() {
return BufferStream(entry);
};
entry.path = fileName;
entry.props = {};
entry.props.path = fileName;
entry.props.pathBuffer = fileNameBuffer;
entry.props.flags = {
"isUnicode": (vars.flags & 0x800) != 0
};
entry.type = (vars.uncompressedSize === 0 && /[/\\]$/.test(fileName)) ? 'Directory' : 'File';
if (self._opts.verbose) {
if (entry.type === 'Directory') {
console.log(' creating:', fileName);
} else if (entry.type === 'File') {
if (vars.compressionMethod === 0) {
console.log(' extracting:', fileName);
} else {
console.log(' inflating:', fileName);
}
}
}
return self.pull(vars.extraFieldLength).then(function(extraField) {
const extra = parseExtraField(extraField, vars);
entry.vars = vars;
entry.extra = extra;
if (self._opts.forceStream) {
self.push(entry);
} else {
self.emit('entry', entry);
if (self._readableState.pipesCount || (self._readableState.pipes && self._readableState.pipes.length))
self.push(entry);
}
if (self._opts.verbose)
console.log({
filename:fileName,
vars: vars,
extra: extra
});
const fileSizeKnown = !(vars.flags & 0x08) || vars.compressedSize > 0;
let eof;
entry.__autodraining = __autodraining; // expose __autodraining for test purposes
const inflater = (vars.compressionMethod && !__autodraining) ? zlib.createInflateRaw() : Stream.PassThrough();
if (fileSizeKnown) {
entry.size = vars.uncompressedSize;
eof = vars.compressedSize;
} else {
eof = Buffer.alloc(4);
eof.writeUInt32LE(0x08074b50, 0);
}
return new Promise(function(resolve, reject) {
pipeline(
self.stream(eof),
inflater,
entry,
function (err) {
if (err) {
return reject(err);
}
return fileSizeKnown ? resolve(fileSizeKnown) : self._processDataDescriptor(entry).then(resolve).catch(reject);
}
);
});
});
});
});
};
Parse.prototype._processDataDescriptor = function (entry) {
const self = this;
return self.pull(16).then(function(data) {
const vars = parseBuffer.parse(data, [
['dataDescriptorSignature', 4],
['crc32', 4],
['compressedSize', 4],
['uncompressedSize', 4],
]);
entry.size = vars.uncompressedSize;
return true;
});
};
Parse.prototype._readCentralDirectoryFileHeader = function () {
const self = this;
return self.pull(42).then(function(data) {
const vars = parseBuffer.parse(data, [
['versionMadeBy', 2],
['versionsNeededToExtract', 2],
['flags', 2],
['compressionMethod', 2],
['lastModifiedTime', 2],
['lastModifiedDate', 2],
['crc32', 4],
['compressedSize', 4],
['uncompressedSize', 4],
['fileNameLength', 2],
['extraFieldLength', 2],
['fileCommentLength', 2],
['diskNumber', 2],
['internalFileAttributes', 2],
['externalFileAttributes', 4],
['offsetToLocalFileHeader', 4],
]);
return self.pull(vars.fileNameLength).then(function(fileName) {
vars.fileName = fileName.toString('utf8');
return self.pull(vars.extraFieldLength);
})
.then(function() {
return self.pull(vars.fileCommentLength);
})
.then(function() {
return true;
});
});
};
Parse.prototype._readEndOfCentralDirectoryRecord = function() {
const self = this;
return self.pull(18).then(function(data) {
const vars = parseBuffer.parse(data, [
['diskNumber', 2],
['diskStart', 2],
['numberOfRecordsOnDisk', 2],
['numberOfRecords', 2],
['sizeOfCentralDirectory', 4],
['offsetToStartOfCentralDirectory', 4],
['commentLength', 2],
]);
return self.pull(vars.commentLength).then(function() {
self.end();
self.push(null);
});
});
};
Parse.prototype.promise = function() {
const self = this;
return new Promise(function(resolve, reject) {
self.on('finish', resolve);
self.on('error', reject);
});
};
module.exports = Parse;
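
Typical streaming usage via the package export (paths are placeholders): every entry must be piped somewhere or autodrained, otherwise parsing stalls on backpressure.

const fs = require('fs');
const unzipper = require('unzipper');

fs.createReadStream('archive.zip')
  .pipe(unzipper.Parse())
  .on('entry', function(entry) {
    if (entry.path === 'wanted.txt') {
      entry.pipe(fs.createWriteStream('wanted.txt'));
    } else {
      entry.autodrain();
    }
  })
  .promise()
  .then(function() { console.log('done'); }, console.error);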

resources/app/node_modules/unzipper/lib/parseBuffer.js generated vendored Normal file

@@ -0,0 +1,55 @@
const parseUIntLE = function(buffer, offset, size) {
let result;
switch(size) {
case 1:
result = buffer.readUInt8(offset);
break;
case 2:
result = buffer.readUInt16LE(offset);
break;
case 4:
result = buffer.readUInt32LE(offset);
break;
case 8:
result = Number(buffer.readBigUInt64LE(offset));
break;
default:
throw new Error('Unsupported UInt LE size!');
}
return result;
};
/**
* Parses sequential unsigned little endian numbers from the head of the passed buffer according to
* the specified format passed. If the buffer is not large enough to satisfy the full format,
* null values will be assigned to the remaining keys.
* @param {*} buffer The buffer to sequentially extract numbers from.
* @param {*} format Expected format to follow when extracting values from the buffer. A list of list entries
* with the following structure:
* [
* [
* <key>, // Name of the key to assign the extracted number to.
* <size> // The size in bytes of the number to extract. possible values are 1, 2, 4, 8.
* ],
* ...
* ]
* @returns An object with keys set to their associated extracted values.
*/
const parse = function(buffer, format) {
const result = {};
let offset = 0;
for(const [key, size] of format) {
if(buffer.length >= offset + size) {
result[key] = parseUIntLE(buffer, offset, size);
}
else {
result[key] = null;
}
offset += size;
}
return result;
};
module.exports = {
parse
};
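
A worked example: two little-endian fields pulled off the head of a 6-byte buffer.

const parseBuffer = require('./parseBuffer');

const buf = Buffer.from([0x01, 0x00, 0x50, 0x4b, 0x05, 0x06]);
const vars = parseBuffer.parse(buf, [['flags', 2], ['signature', 4]]);
console.log(vars.flags);                  // 1
console.log(vars.signature.toString(16)); // '6054b50' (the EOCD signature)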

resources/app/node_modules/unzipper/lib/parseDateTime.js generated vendored Normal file

@@ -0,0 +1,13 @@
// Dates in zip file entries are stored as DosDateTime
// Spec is here: https://docs.microsoft.com/en-us/windows/win32/api/winbase/nf-winbase-dosdatetimetofiletime
module.exports = function parseDateTime(date, time) {
const day = date & 0x1F;
const month = date >> 5 & 0x0F;
const year = (date >> 9 & 0x7F) + 1980;
const seconds = time ? (time & 0x1F) * 2 : 0;
const minutes = time ? (time >> 5) & 0x3F : 0;
const hours = time ? (time >> 11) : 0;
return new Date(Date.UTC(year, month-1, day, hours, minutes, seconds));
};
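
A worked example of the bit layout (values chosen by hand): day in bits 0-4 of the date, month in 5-8, years-since-1980 in 9-15; seconds/2 in bits 0-4 of the time, minutes in 5-10, hours in 11-15.

const parseDateTime = require('./parseDateTime');

// date 0x5A21: year (0x5A21 >> 9) + 1980 = 2025, month 1, day 1
// time 0x6083: hours 12, minutes 4, seconds 3 * 2 = 6
console.log(parseDateTime(0x5A21, 0x6083).toISOString());
// -> 2025-01-01T12:04:06.000Z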

resources/app/node_modules/unzipper/lib/parseExtraField.js generated vendored Normal file

@@ -0,0 +1,40 @@
const parseBuffer = require('./parseBuffer');
module.exports = function(extraField, vars) {
let extra;
// Find the ZIP64 header, if present.
while(!extra && extraField && extraField.length) {
const candidateExtra = parseBuffer.parse(extraField, [
['signature', 2],
['partSize', 2],
]);
if(candidateExtra.signature === 0x0001) {
// parse buffer based on data in ZIP64 central directory; order is important!
const fieldsToExpect = [];
if (vars.uncompressedSize === 0xffffffff) fieldsToExpect.push(['uncompressedSize', 8]);
if (vars.compressedSize === 0xffffffff) fieldsToExpect.push(['compressedSize', 8]);
if (vars.offsetToLocalFileHeader === 0xffffffff) fieldsToExpect.push(['offsetToLocalFileHeader', 8]);
// slice off the 4 bytes for signature and partSize
extra = parseBuffer.parse(extraField.slice(4), fieldsToExpect);
} else {
// Advance the buffer to the next part.
// The total size of this part is the 4 byte header + partsize.
extraField = extraField.slice(candidateExtra.partSize + 4);
}
}
extra = extra || {};
if (vars.compressedSize === 0xffffffff)
vars.compressedSize = extra.compressedSize;
if (vars.uncompressedSize === 0xffffffff)
vars.uncompressedSize = extra.uncompressedSize;
if (vars.offsetToLocalFileHeader === 0xffffffff)
vars.offsetToLocalFileHeader = extra.offsetToLocalFileHeader;
return extra;
};
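
A worked example (hand-built extra field): a zip64 block supplying the real 8-byte uncompressedSize for an entry whose 4-byte field overflowed to 0xffffffff.

const parseExtraField = require('./parseExtraField');

const extra = Buffer.alloc(12);
extra.writeUInt16LE(0x0001, 0);          // zip64 extra-field header id
extra.writeUInt16LE(8, 2);               // partSize: one 8-byte value follows
extra.writeBigUInt64LE(0x100000000n, 4); // actual size: 4 GiB

const vars = { uncompressedSize: 0xffffffff, compressedSize: 100, offsetToLocalFileHeader: 0 };
parseExtraField(extra, vars);
console.log(vars.uncompressedSize); // 4294967296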

resources/app/node_modules/unzipper/lib/parseOne.js generated vendored Normal file

@@ -0,0 +1,54 @@
const Stream = require('stream');
const Parse = require('./parse');
const duplexer2 = require('duplexer2');
const BufferStream = require('./BufferStream');
function parseOne(match, opts) {
const inStream = Stream.PassThrough({objectMode:true});
const outStream = Stream.PassThrough();
const transform = Stream.Transform({objectMode:true});
const re = match instanceof RegExp ? match : (match && new RegExp(match));
let found;
transform._transform = function(entry, e, cb) {
if (found || (re && !re.exec(entry.path))) {
entry.autodrain();
return cb();
} else {
found = true;
out.emit('entry', entry);
entry.on('error', function(e) {
outStream.emit('error', e);
});
entry.pipe(outStream)
.on('error', function(err) {
cb(err);
})
.on('finish', function(d) {
cb(null, d);
});
}
};
inStream.pipe(Parse(opts))
.on('error', function(err) {
outStream.emit('error', err);
})
.pipe(transform)
.on('error', Object) // Silence errors, as they're already addressed in transform
.on('finish', function() {
if (!found)
outStream.emit('error', new Error('PATTERN_NOT_FOUND'));
else
outStream.end();
});
const out = duplexer2(inStream, outStream);
out.buffer = function() {
return BufferStream(outStream);
};
return out;
}
module.exports = parseOne;
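
Typical usage via the package export (pattern and paths are placeholders): stream out only the first entry whose path matches.

const fs = require('fs');
const unzipper = require('unzipper');

fs.createReadStream('archive.zip')
  .pipe(unzipper.ParseOne(/\.txt$/))
  .pipe(fs.createWriteStream('first-text-file.txt'))
  .on('error', console.error);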