This commit is contained in:
2025-01-04 00:34:03 +01:00
parent 41829408dc
commit 0ca14bbc19
18111 changed files with 1871397 additions and 0 deletions

View File

@@ -0,0 +1,243 @@
const PullStream = require('../PullStream');
const unzip = require('./unzip');
const BufferStream = require('../BufferStream');
const parseExtraField = require('../parseExtraField');
const path = require('path');
const fs = require('fs-extra');
const parseDateTime = require('../parseDateTime');
const parseBuffer = require('../parseBuffer');
const Bluebird = require('bluebird');
// End-of-central-directory signature "PK\x05\x06" (little-endian 0x06054b50).
const signature = Buffer.from([0x50, 0x4b, 0x05, 0x06]);
/**
 * Detects and parses a Chrome extension (CRX v2) header at the start of the
 * source.
 *
 * Resolves with `{version, pubKeyLength, signatureLength, publicKey,
 * signature, size}` when the "Cr24" magic number is present, or with
 * `undefined` for a plain zip.
 */
async function getCrxHeader(source) {
  const sourceStream = source.stream(0).pipe(PullStream());

  const magic = await sourceStream.pull(4);
  // 0x34327243 is "Cr24" read little-endian.
  if (magic.readUInt32LE(0) !== 0x34327243) return;

  const header = parseBuffer.parse(await sourceStream.pull(12), [
    ['version', 4],
    ['pubKeyLength', 4],
    ['signatureLength', 4],
  ]);

  const keyAndSig = await sourceStream.pull(header.pubKeyLength + header.signatureLength);
  header.publicKey = keyAndSig.slice(0, header.pubKeyLength);
  header.signature = keyAndSig.slice(header.pubKeyLength);
  // Total CRX header size: 16 fixed bytes + key + signature.
  header.size = 16 + header.pubKeyLength + header.signatureLength;
  return header;
}
// Zip64 File Format Notes: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
/**
 * Follows the zip64 end-of-central-directory locator to the zip64
 * end-of-central-directory record and returns a promise for its
 * 56-byte fixed portion.
 *
 * @param {Object} source - random-access source with `stream(offset)`.
 * @param {Buffer} zip64CDL - the 20-byte zip64 EOCD locator.
 */
function getZip64CentralDirectory(source, zip64CDL) {
  const locator = parseBuffer.parse(zip64CDL, [
    ['signature', 4],
    ['diskNumber', 4],
    ['offsetToStartOfCentralDirectory', 8],
    ['numberOfDisks', 4],
  ]);

  if (locator.signature != 0x07064b50) {
    throw new Error('invalid zip64 end of central dir locator signature (0x07064b50): 0x' + locator.signature.toString(16));
  }

  // Stream the zip64 EOCD record from the offset the locator points at.
  const recordStream = PullStream();
  source.stream(locator.offsetToStartOfCentralDirectory).pipe(recordStream);
  return recordStream.pull(56);
}
// Zip64 File Format Notes: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
/**
 * Parses the fixed 56-byte portion of a zip64 end-of-central-directory
 * RECORD (APPNOTE 4.3.14).
 *
 * Fix: the original field list declared `sizeOfCentralDirectory` twice —
 * the first 8-byte field after the signature is the size of the zip64
 * EOCD record itself, not the central-directory size (the duplicate key
 * silently overwrote the first value). The error message also mislabeled
 * 0x06064b50 as a "locator" signature and printed a stray `0x0` prefix.
 *
 * @param {Buffer} dir64record - the 56-byte zip64 EOCD record.
 * @returns {Object} parsed vars, including `offsetToStartOfCentralDirectory`.
 * @throws {Error} when the record signature is not 0x06064b50.
 */
function parseZip64DirRecord (dir64record) {
  const vars = parseBuffer.parse(dir64record, [
    ['signature', 4],
    ['sizeOfEndOfCentralDirectoryRecord', 8],
    ['version', 2],
    ['versionsNeededToExtract', 2],
    ['diskNumber', 4],
    ['diskStart', 4],
    ['numberOfRecordsOnDisk', 8],
    ['numberOfRecords', 8],
    ['sizeOfCentralDirectory', 8],
    ['offsetToStartOfCentralDirectory', 8],
  ]);

  if (vars.signature !== 0x06064b50) {
    throw new Error('invalid zip64 end of central dir record signature (0x06064b50): 0x' + vars.signature.toString(16));
  }

  return vars;
}
module.exports = function centralDirectory(source, options) {
const endDir = PullStream();
const records = PullStream();
const tailSize = (options && options.tailSize) || 80;
let sourceSize,
crxHeader,
startOffset,
vars;
if (options && options.crx)
crxHeader = getCrxHeader(source);
return source.size()
.then(function(size) {
sourceSize = size;
source.stream(Math.max(0, size-tailSize))
.on('error', function (error) { endDir.emit('error', error); })
.pipe(endDir);
return endDir.pull(signature);
})
.then(function() {
return Bluebird.props({directory: endDir.pull(22), crxHeader: crxHeader});
})
.then(function(d) {
const data = d.directory;
startOffset = d.crxHeader && d.crxHeader.size || 0;
vars = parseBuffer.parse(data, [
['signature', 4],
['diskNumber', 2],
['diskStart', 2],
['numberOfRecordsOnDisk', 2],
['numberOfRecords', 2],
['sizeOfCentralDirectory', 4],
['offsetToStartOfCentralDirectory', 4],
['commentLength', 2],
]);
// Is this zip file using zip64 format? Use same check as Go:
// https://github.com/golang/go/blob/master/src/archive/zip/reader.go#L503
// For zip64 files, need to find zip64 central directory locator header to extract
// relative offset for zip64 central directory record.
if (vars.diskNumber == 0xffff || vars.numberOfRecords == 0xffff ||
vars.offsetToStartOfCentralDirectory == 0xffffffff) {
// Offset to zip64 CDL is 20 bytes before normal CDR
const zip64CDLSize = 20;
const zip64CDLOffset = sourceSize - (tailSize - endDir.match + zip64CDLSize);
const zip64CDLStream = PullStream();
source.stream(zip64CDLOffset).pipe(zip64CDLStream);
return zip64CDLStream.pull(zip64CDLSize)
.then(function (d) { return getZip64CentralDirectory(source, d); })
.then(function (dir64record) {
vars = parseZip64DirRecord(dir64record);
});
} else {
vars.offsetToStartOfCentralDirectory += startOffset;
}
})
.then(function() {
if (vars.commentLength) return endDir.pull(vars.commentLength).then(function(comment) {
vars.comment = comment.toString('utf8');
});
})
.then(function() {
source.stream(vars.offsetToStartOfCentralDirectory).pipe(records);
vars.extract = function(opts) {
if (!opts || !opts.path) throw new Error('PATH_MISSING');
// make sure path is normalized before using it
opts.path = path.resolve(path.normalize(opts.path));
return vars.files.then(function(files) {
return Bluebird.map(files, async function(entry) {
// to avoid zip slip (writing outside of the destination), we resolve
// the target path, and make sure it's nested in the intended
// destination, or not extract it otherwise.
const extractPath = path.join(opts.path, entry.path);
if (extractPath.indexOf(opts.path) != 0) {
return;
}
if (entry.type == 'Directory') {
await fs.ensureDir(extractPath);
return;
}
await fs.ensureDir(path.dirname(extractPath));
const writer = opts.getWriter ? opts.getWriter({path: extractPath}) : fs.createWriteStream(extractPath);
return new Promise(function(resolve, reject) {
entry.stream(opts.password)
.on('error', reject)
.pipe(writer)
.on('close', resolve)
.on('error', reject);
});
}, { concurrency: opts.concurrency > 1 ? opts.concurrency : 1 });
});
};
vars.files = Bluebird.mapSeries(Array(vars.numberOfRecords), function() {
return records.pull(46).then(function(data) {
const vars = parseBuffer.parse(data, [
['signature', 4],
['versionMadeBy', 2],
['versionsNeededToExtract', 2],
['flags', 2],
['compressionMethod', 2],
['lastModifiedTime', 2],
['lastModifiedDate', 2],
['crc32', 4],
['compressedSize', 4],
['uncompressedSize', 4],
['fileNameLength', 2],
['extraFieldLength', 2],
['fileCommentLength', 2],
['diskNumber', 2],
['internalFileAttributes', 2],
['externalFileAttributes', 4],
['offsetToLocalFileHeader', 4],
]);
vars.offsetToLocalFileHeader += startOffset;
vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);
return records.pull(vars.fileNameLength).then(function(fileNameBuffer) {
vars.pathBuffer = fileNameBuffer;
vars.path = fileNameBuffer.toString('utf8');
vars.isUnicode = (vars.flags & 0x800) != 0;
return records.pull(vars.extraFieldLength);
})
.then(function(extraField) {
vars.extra = parseExtraField(extraField, vars);
return records.pull(vars.fileCommentLength);
})
.then(function(comment) {
vars.comment = comment;
vars.type = (vars.uncompressedSize === 0 && /[/\\]$/.test(vars.path)) ? 'Directory' : 'File';
const padding = options && options.padding || 1000;
vars.stream = function(_password) {
const totalSize = 30
+ padding // add an extra buffer
+ (vars.extraFieldLength || 0)
+ (vars.fileNameLength || 0)
+ vars.compressedSize;
return unzip(source, vars.offsetToLocalFileHeader, _password, vars, totalSize);
};
vars.buffer = function(_password) {
return BufferStream(vars.stream(_password));
};
return vars;
});
});
});
return Bluebird.props(vars);
});
};

141
resources/app/node_modules/unzipper/lib/Open/index.js generated vendored Normal file
View File

@@ -0,0 +1,141 @@
const fs = require('graceful-fs');
const directory = require('./directory');
const Stream = require('stream');
module.exports = {
buffer: function(buffer, options) {
const source = {
stream: function(offset, length) {
const stream = Stream.PassThrough();
const end = length ? offset + length : undefined;
stream.end(buffer.slice(offset, end));
return stream;
},
size: function() {
return Promise.resolve(buffer.length);
}
};
return directory(source, options);
},
file: function(filename, options) {
const source = {
stream: function(start, length) {
const end = length ? start + length : undefined;
return fs.createReadStream(filename, {start, end});
},
size: function() {
return new Promise(function(resolve, reject) {
fs.stat(filename, function(err, d) {
if (err)
reject(err);
else
resolve(d.size);
});
});
}
};
return directory(source, options);
},
url: function(request, params, options) {
if (typeof params === 'string')
params = {url: params};
if (!params.url)
throw 'URL missing';
params.headers = params.headers || {};
const source = {
stream : function(offset, length) {
const options = Object.create(params);
const end = length ? offset + length : '';
options.headers = Object.create(params.headers);
options.headers.range = 'bytes='+offset+'-' + end;
return request(options);
},
size: function() {
return new Promise(function(resolve, reject) {
const req = request(params);
req.on('response', function(d) {
req.abort();
if (!d.headers['content-length'])
reject(new Error('Missing content length header'));
else
resolve(d.headers['content-length']);
}).on('error', reject);
});
}
};
return directory(source, options);
},
s3 : function(client, params, options) {
const source = {
size: function() {
return new Promise(function(resolve, reject) {
client.headObject(params, function(err, d) {
if (err)
reject(err);
else
resolve(d.ContentLength);
});
});
},
stream: function(offset, length) {
const d = {};
for (const key in params)
d[key] = params[key];
const end = length ? offset + length : '';
d.Range = 'bytes='+offset+'-' + end;
return client.getObject(d).createReadStream();
}
};
return directory(source, options);
},
s3_v3: function (client, params, options) {
//@ts-ignore
const { GetObjectCommand, HeadObjectCommand } = require('@aws-sdk/client-s3');
const source = {
size: async () => {
const head = await client.send(
new HeadObjectCommand({
Bucket: params.Bucket,
Key: params.Key,
})
);
if(!head.ContentLength) {
return 0;
}
return head.ContentLength;
},
stream: (offset, length) => {
const stream = Stream.PassThrough();
const end = length ? offset + length : "";
client
.send(
new GetObjectCommand({
Bucket: params.Bucket,
Key: params.Key,
Range: `bytes=${offset}-${end}`,
})
)
.then((response) => {
response.Body.pipe(stream);
})
.catch((error) => {
stream.emit("error", error);
});
return stream;
},
};
return directory(source, options);
},
custom: function(source, options) {
return directory(source, options);
}
};

120
resources/app/node_modules/unzipper/lib/Open/unzip.js generated vendored Normal file
View File

@@ -0,0 +1,120 @@
const Decrypt = require('../Decrypt');
const PullStream = require('../PullStream');
const Stream = require('stream');
const zlib = require('zlib');
const parseExtraField = require('../parseExtraField');
const parseDateTime = require('../parseDateTime');
const parseBuffer = require('../parseBuffer');
/**
 * Streams one zip entry out of `source`.
 *
 * Pulls the local file header at `offset`, optionally primes legacy
 * ZipCrypto decryption, then pipes the (possibly inflated) file data into
 * the returned PassThrough stream.
 *
 * @param {Object} source - random-access source with `stream(offset, length)`.
 * @param {number} offset - byte offset of the entry's local file header.
 * @param {string} _password - password for encrypted entries (may be falsy).
 * @param {Object} directoryVars - central-directory vars for this entry;
 *   when present with a compressedSize they override the local-header vars.
 * @param {number} length - upper bound of bytes to request from the source.
 * @returns {Stream.PassThrough} the entry stream; also carries `vars`
 *   (a promise for the parsed header) and `size` when known up front.
 */
module.exports = function unzip(source, offset, _password, directoryVars, length) {
  const file = PullStream(),
    entry = Stream.PassThrough();

  const req = source.stream(offset, length);
  req.pipe(file).on('error', function(e) {
    entry.emit('error', e);
  });

  entry.vars = file.pull(30)
    .then(function(data) {
      // Fixed 30-byte portion of the local file header (signature 0x04034b50).
      let vars = parseBuffer.parse(data, [
        ['signature', 4],
        ['versionsNeededToExtract', 2],
        ['flags', 2],
        ['compressionMethod', 2],
        ['lastModifiedTime', 2],
        ['lastModifiedDate', 2],
        ['crc32', 4],
        ['compressedSize', 4],
        ['uncompressedSize', 4],
        ['fileNameLength', 2],
        ['extraFieldLength', 2],
      ]);

      vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);

      return file.pull(vars.fileNameLength)
        .then(function(fileName) {
          vars.fileName = fileName.toString('utf8');
          return file.pull(vars.extraFieldLength);
        })
        .then(function(extraField) {
          let checkEncryption;
          vars.extra = parseExtraField(extraField, vars);
          // Ignore local file header vars if the directory vars are available
          if (directoryVars && directoryVars.compressedSize) vars = directoryVars;

          // Flag bit 0 marks an encrypted entry: consume the 12-byte
          // ZipCrypto header, prime the keystream with the password, and use
          // the final header byte as a cheap password check.
          if (vars.flags & 0x01) checkEncryption = file.pull(12)
            .then(function(header) {
              if (!_password)
                throw new Error('MISSING_PASSWORD');

              const decrypt = Decrypt();

              String(_password).split('').forEach(function(d) {
                decrypt.update(d);
              });

              for (let i=0; i < header.length; i++)
                header[i] = decrypt.decryptByte(header[i]);

              vars.decrypt = decrypt;
              // The 12 encryption-header bytes are not part of the file data.
              vars.compressedSize -= 12;

              // With flag bit 3 (data descriptor) the CRC is not yet known,
              // so the check byte comes from the mod-time field instead.
              const check = (vars.flags & 0x8) ? (vars.lastModifiedTime >> 8) & 0xff : (vars.crc32 >> 24) & 0xff;
              if (header[11] !== check)
                throw new Error('BAD_PASSWORD');

              return vars;
            });

          return Promise.resolve(checkEncryption)
            .then(function() {
              entry.emit('vars', vars);
              return vars;
            });
        });
    });

  entry.vars.then(function(vars) {
    // With flag bit 3 set the sizes live in a trailing data descriptor, so
    // stream until its signature (0x08074b50) unless the central directory
    // already supplied a compressed size.
    const fileSizeKnown = !(vars.flags & 0x08) || vars.compressedSize > 0;
    let eof;

    // compressionMethod 0 is "stored"; anything else here is deflate.
    const inflater = vars.compressionMethod ? zlib.createInflateRaw() : Stream.PassThrough();

    if (fileSizeKnown) {
      entry.size = vars.uncompressedSize;
      eof = vars.compressedSize;
    } else {
      eof = Buffer.alloc(4);
      eof.writeUInt32LE(0x08074b50, 0);
    }

    let stream = file.stream(eof);

    if (vars.decrypt)
      stream = stream.pipe(vars.decrypt.stream());

    stream
      .pipe(inflater)
      .on('error', function(err) { entry.emit('error', err);})
      .pipe(entry)
      .on('finish', function() {
        // Tear down the underlying request/stream using whatever close
        // mechanism the source's stream implementation offers.
        if(req.destroy)
          req.destroy();
        else if (req.abort)
          req.abort();
        else if (req.close)
          req.close();
        else if (req.push)
          req.push();
        else
          console.log('warning - unable to close stream');
      });
  })
    .catch(function(e) {
      entry.emit('error', e);
    });

  return entry;
};