Initial
This commit is contained in:
21
resources/app/node_modules/tar-stream/LICENSE
generated
vendored
Normal file
21
resources/app/node_modules/tar-stream/LICENSE
generated
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Mathias Buus
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
14
resources/app/node_modules/tar-stream/constants.js
generated
vendored
Normal file
14
resources/app/node_modules/tar-stream/constants.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
// Fallback file-type bitmask constants (POSIX <sys/stat.h> values) for
// environments where the 'fs' module is unavailable (e.g. browsers).
const fallbackConstants = {
  S_IFMT: 61440, // bitmask selecting the file-type bits of a mode
  S_IFDIR: 16384, // directory
  S_IFCHR: 8192, // character device
  S_IFBLK: 24576, // block device
  S_IFIFO: 4096, // FIFO / pipe
  S_IFLNK: 40960 // symbolic link
}

try {
  // Prefer the platform-provided constants whenever fs can be loaded.
  module.exports = require('fs').constants || fallbackConstants
} catch {
  module.exports = fallbackConstants
}
|
||||
406
resources/app/node_modules/tar-stream/extract.js
generated
vendored
Normal file
406
resources/app/node_modules/tar-stream/extract.js
generated
vendored
Normal file
@@ -0,0 +1,406 @@
|
||||
const { Writable, Readable, getStreamError } = require('streamx')
|
||||
const FIFO = require('fast-fifo')
|
||||
const b4a = require('b4a')
|
||||
const headers = require('./headers')
|
||||
|
||||
const EMPTY = b4a.alloc(0)
|
||||
|
||||
// Zero-copy queue of incoming Buffers with an absolute read position.
// `buffered` is the number of unread bytes, `shifted` the total number of
// bytes consumed since creation (i.e. the current archive offset).
class BufferList {
  constructor () {
    this.buffered = 0
    this.shifted = 0
    this.queue = new FIFO()

    // Read offset inside the buffer at the head of the queue.
    this._offset = 0
  }

  // Append a chunk without copying.
  push (buffer) {
    this.buffered += buffer.byteLength
    this.queue.push(buffer)
  }

  // Return up to `size` bytes from the head chunk only (may return fewer),
  // or null when nothing is buffered.
  // BUGFIX: this previously read `this._buffered`, a property that does not
  // exist, so the guard could never trigger and an empty list would crash
  // inside _next() instead of returning null.
  shiftFirst (size) {
    return this.buffered === 0 ? null : this._next(size)
  }

  // Return exactly `size` bytes (concatenating chunks when needed),
  // or null when fewer than `size` bytes are buffered.
  shift (size) {
    if (size > this.buffered) return null
    if (size === 0) return EMPTY

    let chunk = this._next(size)

    if (size === chunk.byteLength) return chunk // likely case

    const chunks = [chunk]

    while ((size -= chunk.byteLength) > 0) {
      chunk = this._next(size)
      chunks.push(chunk)
    }

    return b4a.concat(chunks)
  }

  // Take up to `size` bytes from the head chunk, advancing the counters.
  // Assumes at least one chunk is queued.
  _next (size) {
    const buf = this.queue.peek()
    const rem = buf.byteLength - this._offset

    if (size >= rem) {
      // Head chunk is fully consumed; hand it out (sub-sliced if we had
      // already read part of it) and drop it from the queue.
      const sub = this._offset ? buf.subarray(this._offset, buf.byteLength) : buf
      this.queue.shift()
      this._offset = 0
      this.buffered -= rem
      this.shifted += rem
      return sub
    }

    this.buffered -= size
    this.shifted += size

    return buf.subarray(this._offset, (this._offset += size))
  }
}
|
||||
|
||||
// Readable stream handed to 'entry' listeners for a single tar entry's
// body. Reading from it pumps the parent Extract parser forward, which is
// how backpressure propagates from the consumer to the archive writer.
class Source extends Readable {
  constructor (self, header, offset) {
    super()

    // Decoded tar header for this entry.
    this.header = header
    // Archive byte offset at which this entry's header block started.
    this.offset = offset

    this._parent = self
  }

  _read (cb) {
    // Entries with no body complete immediately.
    if (this.header.size === 0) {
      this.push(null)
    }
    // Only the currently-active entry is allowed to drive the parser.
    if (this._parent._stream === this) {
      this._parent._update()
    }
    cb(null)
  }

  _predestroy () {
    // Destroying an entry mid-body invalidates the whole archive parse,
    // so propagate the error (if any) to the parent Extract.
    this._parent.destroy(getStreamError(this))
  }

  _detach () {
    // Disconnect from the parent once the body has been fully delivered:
    // the parser then only needs to skip the entry's 512-byte padding
    // (overflow) before reading the next header.
    if (this._parent._stream === this) {
      this._parent._stream = null
      this._parent._missing = overflow(this.header.size)
      this._parent._update()
    }
  }

  _destroy (cb) {
    this._detach()
    cb(null)
  }
}
|
||||
|
||||
// Writable stream that incrementally parses a tar archive written into it
// and emits 'entry' (header, stream, next) for each entry. The _write
// callback is withheld (via _callback) until the buffered data has been
// consumed, which is how backpressure reaches the producer. `_locked` is
// true while an entry is outstanding (its `next` callback not yet called).
class Extract extends Writable {
  constructor (opts) {
    super(opts)

    if (!opts) opts = {}

    this._buffer = new BufferList() // unparsed input bytes
    this._offset = 0 // archive offset of the current header block
    this._header = null // most recently decoded header
    this._stream = null // Source for the entry currently being streamed
    this._missing = 0 // bytes still to consume for the current body/padding
    this._longHeader = false // true while consuming a pax/gnu-long body
    this._callback = noop // pending _write callback
    this._locked = false // an 'entry' listener has not called next() yet
    this._finished = false
    this._pax = null // pax records to apply to the next header
    this._paxGlobal = null // pax-global records applied to all headers
    this._gnuLongPath = null
    this._gnuLongLinkPath = null
    this._filenameEncoding = opts.filenameEncoding || 'utf-8'
    this._allowUnknownFormat = !!opts.allowUnknownFormat
    this._unlockBound = this._unlock.bind(this) // the `next` cb given to listeners
  }

  // Called by the entry listener to acknowledge an entry and resume parsing.
  _unlock (err) {
    this._locked = false

    if (err) {
      this.destroy(err)
      this._continueWrite(err)
      return
    }

    this._update()
  }

  // Decode one 512-byte header block. Returns false when parsing must stop
  // (locked or decode error), true otherwise.
  _consumeHeader () {
    if (this._locked) return false

    // Remember where this header starts; exposed as Source#offset.
    this._offset = this._buffer.shifted

    try {
      this._header = headers.decode(this._buffer.shift(512), this._filenameEncoding, this._allowUnknownFormat)
    } catch (err) {
      this._continueWrite(err)
      return false
    }

    // A null header is an all-zero block (end-of-archive padding).
    if (!this._header) return true

    switch (this._header.type) {
      case 'gnu-long-path':
      case 'gnu-long-link-path':
      case 'pax-global-header':
      case 'pax-header':
        // Meta entries: their body amends the NEXT real header.
        this._longHeader = true
        this._missing = this._header.size
        return true
    }

    this._locked = true
    this._applyLongHeaders()

    if (this._header.size === 0 || this._header.type === 'directory') {
      // Bodiless entry: hand out a stream that ends immediately.
      this.emit('entry', this._header, this._createStream(), this._unlockBound)
      return true
    }

    this._stream = this._createStream()
    this._missing = this._header.size

    this.emit('entry', this._header, this._stream, this._unlockBound)
    return true
  }

  // Fold any pending gnu-long/pax metadata into the current header,
  // then clear it (each only applies to one following entry).
  _applyLongHeaders () {
    if (this._gnuLongPath) {
      this._header.name = this._gnuLongPath
      this._gnuLongPath = null
    }

    if (this._gnuLongLinkPath) {
      this._header.linkname = this._gnuLongLinkPath
      this._gnuLongLinkPath = null
    }

    if (this._pax) {
      if (this._pax.path) this._header.name = this._pax.path
      if (this._pax.linkpath) this._header.linkname = this._pax.linkpath
      if (this._pax.size) this._header.size = parseInt(this._pax.size, 10)
      this._header.pax = this._pax
      this._pax = null
    }
  }

  // Decode the body of a meta entry into the matching pending slot.
  _decodeLongHeader (buf) {
    switch (this._header.type) {
      case 'gnu-long-path':
        this._gnuLongPath = headers.decodeLongPath(buf, this._filenameEncoding)
        break
      case 'gnu-long-link-path':
        this._gnuLongLinkPath = headers.decodeLongPath(buf, this._filenameEncoding)
        break
      case 'pax-global-header':
        this._paxGlobal = headers.decodePax(buf)
        break
      case 'pax-header':
        // Per-entry pax records override global ones.
        this._pax = this._paxGlobal === null
          ? headers.decodePax(buf)
          : Object.assign({}, this._paxGlobal, headers.decodePax(buf))
        break
    }
  }

  // Consume a fully-buffered meta-entry body; leaves _missing set to the
  // padding that follows it. Returns false on decode error.
  _consumeLongHeader () {
    this._longHeader = false
    this._missing = overflow(this._header.size)

    const buf = this._buffer.shift(this._header.size)

    try {
      this._decodeLongHeader(buf)
    } catch (err) {
      this._continueWrite(err)
      return false
    }

    return true
  }

  // Push buffered bytes into the current entry's Source. Returns false when
  // parsing should pause (no data, downstream not drained, or still locked).
  _consumeStream () {
    const buf = this._buffer.shiftFirst(this._missing)
    if (buf === null) return false

    this._missing -= buf.byteLength
    const drained = this._stream.push(buf)

    if (this._missing === 0) {
      this._stream.push(null)
      if (drained) this._stream._detach()
      return drained && this._locked === false
    }

    return drained
  }

  _createStream () {
    return new Source(this, this._header, this._offset)
  }

  // Main parser loop: alternates between body bytes, meta-entry bodies,
  // padding to skip, and 512-byte header blocks, until it runs out of
  // buffered data or must wait (backpressure / entry not acknowledged).
  _update () {
    while (this._buffer.buffered > 0 && !this.destroying) {
      if (this._missing > 0) {
        if (this._stream !== null) {
          if (this._consumeStream() === false) return
          continue
        }

        if (this._longHeader === true) {
          // Meta bodies are decoded in one piece; wait until fully buffered.
          if (this._missing > this._buffer.buffered) break
          // NOTE: the `false` return value is never used by callers.
          if (this._consumeLongHeader() === false) return false
          continue
        }

        // No stream attached: these are padding bytes to discard.
        const ignore = this._buffer.shiftFirst(this._missing)
        if (ignore !== null) this._missing -= ignore.byteLength
        continue
      }

      // Need a full header block before decoding.
      if (this._buffer.buffered < 512) break
      if (this._stream !== null || this._consumeHeader() === false) return
    }

    // All consumable data processed: release the pending write callback.
    this._continueWrite(null)
  }

  // Fire-and-clear the pending _write callback (ack or error).
  _continueWrite (err) {
    const cb = this._callback
    this._callback = noop
    cb(err)
  }

  _write (data, cb) {
    // Hold the callback until _update() has consumed what it can.
    this._callback = cb
    this._buffer.push(data)
    this._update()
  }

  _final (cb) {
    // Input may legitimately end only on an entry boundary with nothing
    // left unparsed; anything else is a truncated archive.
    this._finished = this._missing === 0 && this._buffer.buffered === 0
    cb(this._finished ? null : new Error('Unexpected end of data'))
  }

  _predestroy () {
    // Never strand a pending write callback on teardown.
    this._continueWrite(null)
  }

  _destroy (cb) {
    if (this._stream) this._stream.destroy(getStreamError(this))
    cb(null)
  }

  // Async iteration over entry streams: `for await (const entry of extract)`.
  // Bridges the 'entry'/'close'/'error' events onto promise-based next().
  // The entry ack callback is deferred until the consumer asks for the
  // next entry, so each body must be consumed before iterating on.
  [Symbol.asyncIterator] () {
    let error = null

    // Pending next() promise, when the consumer is waiting for an entry.
    let promiseResolve = null
    let promiseReject = null

    // Entry that arrived before the consumer asked for it.
    let entryStream = null
    let entryCallback = null

    const extract = this

    this.on('entry', onentry)
    this.on('error', (err) => { error = err })
    this.on('close', onclose)

    return {
      [Symbol.asyncIterator] () {
        return this
      },
      next () {
        return new Promise(onnext)
      },
      return () {
        return destroy(null)
      },
      throw (err) {
        return destroy(err)
      }
    }

    // Acknowledge the previously-delivered entry exactly once.
    function consumeCallback (err) {
      if (!entryCallback) return
      const cb = entryCallback
      entryCallback = null
      cb(err)
    }

    function onnext (resolve, reject) {
      if (error) {
        return reject(error)
      }

      // An entry is already queued: hand it over immediately.
      if (entryStream) {
        resolve({ value: entryStream, done: false })
        entryStream = null
        return
      }

      promiseResolve = resolve
      promiseReject = reject

      // Ack the previous entry; this may synchronously emit the next one
      // (which resolves the promise via onentry).
      consumeCallback(null)

      if (extract._finished && promiseResolve) {
        promiseResolve({ value: undefined, done: true })
        promiseResolve = promiseReject = null
      }
    }

    function onentry (header, stream, callback) {
      entryCallback = callback
      stream.on('error', noop) // no way around this due to tick sillyness

      if (promiseResolve) {
        promiseResolve({ value: stream, done: false })
        promiseResolve = promiseReject = null
      } else {
        entryStream = stream
      }
    }

    function onclose () {
      consumeCallback(error)
      if (!promiseResolve) return
      if (error) promiseReject(error)
      else promiseResolve({ value: undefined, done: true })
      promiseResolve = promiseReject = null
    }

    // Tear down the extractor and settle once it has fully closed.
    function destroy (err) {
      extract.destroy(err)
      consumeCallback(err)
      return new Promise((resolve, reject) => {
        if (extract.destroyed) return resolve({ value: undefined, done: true })
        extract.once('close', function () {
          if (err) reject(err)
          else resolve({ value: undefined, done: true })
        })
      })
    }
  }
}
|
||||
|
||||
module.exports = function extract (opts) {
|
||||
return new Extract(opts)
|
||||
}
|
||||
|
||||
function noop () {}
|
||||
|
||||
// Number of padding bytes needed to round `size` up to the next
// 512-byte tar block boundary (0 when already aligned).
function overflow (size) {
  const remainder = size & 511
  return remainder === 0 ? 0 : 512 - remainder
}
|
||||
321
resources/app/node_modules/tar-stream/headers.js
generated
vendored
Normal file
321
resources/app/node_modules/tar-stream/headers.js
generated
vendored
Normal file
@@ -0,0 +1,321 @@
|
||||
const b4a = require('b4a')
|
||||
|
||||
const ZEROS = '0000000000000000000'
|
||||
const SEVENS = '7777777777777777777'
|
||||
const ZERO_OFFSET = '0'.charCodeAt(0)
|
||||
const USTAR_MAGIC = b4a.from([0x75, 0x73, 0x74, 0x61, 0x72, 0x00]) // ustar\x00
|
||||
const USTAR_VER = b4a.from([ZERO_OFFSET, ZERO_OFFSET])
|
||||
const GNU_MAGIC = b4a.from([0x75, 0x73, 0x74, 0x61, 0x72, 0x20]) // ustar\x20
|
||||
const GNU_VER = b4a.from([0x20, 0x00])
|
||||
const MASK = 0o7777
|
||||
const MAGIC_OFFSET = 257
|
||||
const VERSION_OFFSET = 263
|
||||
|
||||
exports.decodeLongPath = function decodeLongPath (buf, encoding) {
|
||||
return decodeStr(buf, 0, buf.length, encoding)
|
||||
}
|
||||
|
||||
exports.encodePax = function encodePax (opts) { // TODO: encode more stuff in pax
|
||||
let result = ''
|
||||
if (opts.name) result += addLength(' path=' + opts.name + '\n')
|
||||
if (opts.linkname) result += addLength(' linkpath=' + opts.linkname + '\n')
|
||||
const pax = opts.pax
|
||||
if (pax) {
|
||||
for (const key in pax) {
|
||||
result += addLength(' ' + key + '=' + pax[key] + '\n')
|
||||
}
|
||||
}
|
||||
return b4a.from(result)
|
||||
}
|
||||
|
||||
// Parse a pax extended-header body into a key/value object. Each record
// has the form "<len> <key>=<value>\n" where <len> is the total record
// length in bytes, including the digits and trailing newline.
// Malformed records terminate parsing and return what was decoded so far.
exports.decodePax = function decodePax (buf) {
  const result = {}

  while (buf.length) {
    // Scan the decimal length prefix (up to the first space).
    let i = 0
    while (i < buf.length && buf[i] !== 32) i++
    const len = parseInt(b4a.toString(buf.subarray(0, i)), 10)
    // Zero/unparsable length: stop (also guards against an infinite loop).
    if (!len) return result

    // Record content sits between the space and the trailing newline.
    const b = b4a.toString(buf.subarray(i + 1, len - 1))
    const keyIndex = b.indexOf('=')
    if (keyIndex === -1) return result
    result[b.slice(0, keyIndex)] = b.slice(keyIndex + 1)

    // Advance to the next record.
    buf = buf.subarray(len)
  }

  return result
}
|
||||
|
||||
// Encode a header object into a single 512-byte USTAR header block.
// Returns null when the header cannot be represented in plain USTAR
// (non-ASCII-length name, name/prefix/linkname too long) -- callers then
// fall back to a pax header (see pack.js _encodePax).
exports.encode = function encode (opts) {
  const buf = b4a.alloc(512)
  let name = opts.name
  let prefix = ''

  // Directories conventionally carry a trailing slash.
  if (opts.typeflag === 5 && name[name.length - 1] !== '/') name += '/'
  // Multi-byte UTF-8 names cannot be split reliably below; bail to pax.
  if (b4a.byteLength(name) !== name.length) return null // utf-8

  // Names longer than the 100-byte field are split at '/' boundaries into
  // the 155-byte USTAR prefix field plus the name field.
  while (b4a.byteLength(name) > 100) {
    const i = name.indexOf('/')
    if (i === -1) return null
    prefix += prefix ? '/' + name.slice(0, i) : name.slice(0, i)
    name = name.slice(i + 1)
  }

  if (b4a.byteLength(name) > 100 || b4a.byteLength(prefix) > 155) return null
  if (opts.linkname && b4a.byteLength(opts.linkname) > 100) return null

  // Fixed USTAR field offsets.
  b4a.write(buf, name)
  b4a.write(buf, encodeOct(opts.mode & MASK, 6), 100)
  b4a.write(buf, encodeOct(opts.uid, 6), 108)
  b4a.write(buf, encodeOct(opts.gid, 6), 116)
  // Size may use the base-256 extension when too large for octal.
  encodeSize(opts.size, buf, 124)
  b4a.write(buf, encodeOct((opts.mtime.getTime() / 1000) | 0, 11), 136)

  // Typeflag is stored as an ASCII digit at byte 156.
  buf[156] = ZERO_OFFSET + toTypeflag(opts.type)

  if (opts.linkname) b4a.write(buf, opts.linkname, 157)

  b4a.copy(USTAR_MAGIC, buf, MAGIC_OFFSET)
  b4a.copy(USTAR_VER, buf, VERSION_OFFSET)
  if (opts.uname) b4a.write(buf, opts.uname, 265)
  if (opts.gname) b4a.write(buf, opts.gname, 297)
  b4a.write(buf, encodeOct(opts.devmajor || 0, 6), 329)
  b4a.write(buf, encodeOct(opts.devminor || 0, 6), 337)

  if (prefix) b4a.write(buf, prefix, 345)

  // Checksum is computed last, over the block with the checksum field
  // treated as spaces (cksum's initial value accounts for that).
  b4a.write(buf, encodeOct(cksum(buf), 6), 148)

  return buf
}
|
||||
|
||||
// Decode one 512-byte tar header block into a header object.
// Returns null for an all-zero block (end-of-archive padding); throws on
// a bad checksum or (unless allowUnknownFormat) an unrecognised magic.
exports.decode = function decode (buf, filenameEncoding, allowUnknownFormat) {
  // Byte 156 holds the typeflag as an ASCII digit; NUL means regular file.
  let typeflag = buf[156] === 0 ? 0 : buf[156] - ZERO_OFFSET

  let name = decodeStr(buf, 0, 100, filenameEncoding)
  const mode = decodeOct(buf, 100, 8)
  const uid = decodeOct(buf, 108, 8)
  const gid = decodeOct(buf, 116, 8)
  const size = decodeOct(buf, 124, 12)
  const mtime = decodeOct(buf, 136, 12)
  const type = toType(typeflag)
  const linkname = buf[157] === 0 ? null : decodeStr(buf, 157, 100, filenameEncoding)
  const uname = decodeStr(buf, 265, 32)
  const gname = decodeStr(buf, 297, 32)
  const devmajor = decodeOct(buf, 329, 8)
  const devminor = decodeOct(buf, 337, 8)

  const c = cksum(buf)

  // checksum is still initial value if header was null.
  if (c === 8 * 32) return null

  // valid checksum
  if (c !== decodeOct(buf, 148, 8)) throw new Error('Invalid tar header. Maybe the tar is corrupted or it needs to be gunzipped?')

  if (isUSTAR(buf)) {
    // ustar (posix) format.
    // prepend prefix, if present.
    if (buf[345]) name = decodeStr(buf, 345, 155, filenameEncoding) + '/' + name
  } else if (isGNU(buf)) {
    // 'gnu'/'oldgnu' format. Similar to ustar, but has support for incremental and
    // multi-volume tarballs.
  } else {
    if (!allowUnknownFormat) {
      throw new Error('Invalid tar header: unknown format.')
    }
  }

  // to support old tar versions that use trailing / to indicate dirs
  if (typeflag === 0 && name && name[name.length - 1] === '/') typeflag = 5

  return {
    name,
    mode,
    uid,
    gid,
    size,
    mtime: new Date(1000 * mtime),
    type,
    linkname,
    uname,
    gname,
    devmajor,
    devminor,
    pax: null // filled in later by Extract when pax records apply
  }
}
|
||||
|
||||
// True when the header carries the POSIX USTAR magic ("ustar\0") at
// byte offset 257.
function isUSTAR (buf) {
  const magic = buf.subarray(MAGIC_OFFSET, MAGIC_OFFSET + 6)
  return b4a.equals(USTAR_MAGIC, magic)
}
|
||||
|
||||
// True when the header carries the GNU/oldgnu magic ("ustar ") together
// with the GNU version bytes (" \0").
function isGNU (buf) {
  const magic = buf.subarray(MAGIC_OFFSET, MAGIC_OFFSET + 6)
  if (!b4a.equals(GNU_MAGIC, magic)) return false
  const version = buf.subarray(VERSION_OFFSET, VERSION_OFFSET + 2)
  return b4a.equals(GNU_VER, version)
}
|
||||
|
||||
// Normalise an index against a length: non-numbers yield the default,
// values are truncated toward zero, clamped to [0, len], and negative
// values count back from the end (floored at 0).
function clamp (index, len, defaultValue) {
  if (typeof index !== 'number') return defaultValue
  const i = ~~index // truncate toward zero
  if (i >= len) return len
  if (i >= 0) return i
  const fromEnd = i + len
  return fromEnd >= 0 ? fromEnd : 0
}
|
||||
|
||||
// Map a numeric typeflag (header byte 156 minus ASCII '0') to the entry
// type name used throughout this module, or null when unrecognised.
function toType (flag) {
  const names = {
    0: 'file',
    1: 'link',
    2: 'symlink',
    3: 'character-device',
    4: 'block-device',
    5: 'directory',
    6: 'fifo',
    7: 'contiguous-file',
    72: 'pax-header',
    55: 'pax-global-header',
    27: 'gnu-long-link-path',
    28: 'gnu-long-path',
    30: 'gnu-long-path'
  }

  // Own-property guard so prototype keys can never leak through.
  if (Object.prototype.hasOwnProperty.call(names, flag)) return names[flag]
  return null
}
|
||||
|
||||
// Map an entry type name to its numeric tar typeflag. Unknown or absent
// types encode as 0, i.e. a regular file.
function toTypeflag (flag) {
  const flags = {
    file: 0,
    link: 1,
    symlink: 2,
    'character-device': 3,
    'block-device': 4,
    directory: 5,
    fifo: 6,
    'contiguous-file': 7,
    'pax-header': 72
  }

  // Own-property guard so prototype keys can never leak through.
  if (Object.prototype.hasOwnProperty.call(flags, flag)) return flags[flag]
  return 0
}
|
||||
|
||||
// Find the first occurrence of byte `num` within [offset, end) of
// `block`; returns `end` (not -1) when the byte is absent.
function indexOf (block, num, offset, end) {
  let pos = offset
  while (pos < end && block[pos] !== num) pos++
  return pos
}
|
||||
|
||||
// Tar header checksum: sum of all 512 header bytes with the 8-byte
// checksum field itself (bytes 148-155) treated as ASCII spaces, hence
// the 8 * 32 starting value.
function cksum (block) {
  let total = 8 * 32
  for (let i = 0; i < 512; i++) {
    if (i < 148 || i >= 156) total += block[i]
  }
  return total
}
|
||||
|
||||
// Render `val` as zero-padded octal in `n` characters followed by a
// space; values that do not fit degrade to all sevens (the field max).
function encodeOct (val, n) {
  const oct = val.toString(8)
  if (oct.length > n) return SEVENS.slice(0, n) + ' '
  const padding = ZEROS.slice(0, n - oct.length)
  return padding + oct + ' '
}
|
||||
|
||||
// GNU base-256 size encoding: marker byte 0x80 followed by the value in
// big-endian byte order across the remaining 11 bytes of the field.
function encodeSizeBin (num, buf, off) {
  buf[off] = 0x80
  let rest = num
  for (let pos = off + 11; pos > off; pos--) {
    buf[pos] = rest & 0xff
    rest = Math.floor(rest / 0x100)
  }
}
|
||||
|
||||
// Write an entry size at `off`: plain octal when it fits in 11 digits,
// otherwise the GNU base-256 extension.
function encodeSize (num, buf, off) {
  const fitsOctal = num.toString(8).length <= 11
  if (fitsOctal) {
    b4a.write(buf, encodeOct(num, 11), off)
    return
  }
  encodeSizeBin(num, buf, off)
}
|
||||
|
||||
/* Copied from the node-tar repo and modified to meet
 * tar-stream coding standard.
 *
 * Source: https://github.com/npm/node-tar/blob/51b6627a1f357d2eb433e7378e5f05e83b7aa6cd/lib/header.js#L349
 */
// Decode a base-256 numeric field. The first byte must be 0x80
// (positive) or 0xFF (negative two's complement); anything else means
// the field is not base-256 encoded and null is returned.
function parse256 (buf) {
  if (buf[0] !== 0x80 && buf[0] !== 0xFF) return null
  const negative = buf[0] === 0xFF

  // Horner accumulation over the remaining bytes, most significant
  // first; negative values have each digit complemented.
  let value = 0
  for (let i = 1; i < buf.length; i++) {
    const digit = negative ? 0xFF - buf[i] : buf[i]
    value = value * 256 + digit
  }

  return negative ? -value : value
}
|
||||
|
||||
// Decode a numeric header field at [offset, offset + length): either the
// GNU base-256 binary encoding (high bit set on the first byte) or octal
// ASCII, which may be padded with leading spaces and/or NULs.
function decodeOct (val, offset, length) {
  const field = val.subarray(offset, offset + length)

  // High bit on the first byte marks the base-256 extension.
  if (field[0] & 0x80) return parse256(field)

  // Skip leading spaces (older tars), find the terminating space, then
  // skip any leading NULs before parsing the octal digits.
  let start = 0
  while (start < field.length && field[start] === 32) start++
  const end = clamp(indexOf(field, 32, start, field.length), field.length, field.length)
  while (start < end && field[start] === 0) start++
  if (end === start) return 0
  return parseInt(b4a.toString(field.subarray(start, end)), 8)
}
|
||||
|
||||
// Decode a NUL-terminated string field: stop at the first NUL byte, or
// at the end of the field when none is present.
function decodeStr (val, offset, length, encoding) {
  const terminator = indexOf(val, 0, offset, offset + length)
  return b4a.toString(val.subarray(offset, terminator), encoding)
}
|
||||
|
||||
// Prefix a pax record with its own total byte length (the length digits
// themselves are included in the count, which is why adding the prefix
// can push the total into one more digit).
function addLength (str) {
  const bytes = b4a.byteLength(str)
  let width = Math.floor(Math.log(bytes) / Math.log(10)) + 1
  if (bytes + width >= Math.pow(10, width)) width++

  return `${bytes + width}${str}`
}
|
||||
2
resources/app/node_modules/tar-stream/index.js
generated
vendored
Normal file
2
resources/app/node_modules/tar-stream/index.js
generated
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
// Public API: the streaming tar parser (extract) and generator (pack).
exports.extract = require('./extract')
exports.pack = require('./pack')
|
||||
287
resources/app/node_modules/tar-stream/pack.js
generated
vendored
Normal file
287
resources/app/node_modules/tar-stream/pack.js
generated
vendored
Normal file
@@ -0,0 +1,287 @@
|
||||
const { Readable, Writable, getStreamError } = require('streamx')
|
||||
const b4a = require('b4a')
|
||||
|
||||
const constants = require('./constants')
|
||||
const headers = require('./headers')
|
||||
|
||||
const DMODE = 0o755
|
||||
const FMODE = 0o644
|
||||
|
||||
const END_OF_TAR = b4a.alloc(1024)
|
||||
|
||||
// Writable stream returned by Pack#entry for one tar entry's body.
// Entries are serialised: only one Sink is active on the Pack at a time;
// the rest wait in pack._pending until _continueOpen is called.
class Sink extends Writable {
  constructor (pack, header, callback) {
    super({ mapWritable, eagerOpen: true })

    this.written = 0 // body bytes pushed so far (checked against header.size)
    this.header = header

    this._callback = callback
    this._linkname = null // accumulated symlink target, when _isLinkname
    // Symlinks without an explicit linkname take it from the body instead.
    this._isLinkname = header.type === 'symlink' && !header.linkname
    // Only files (and contiguous-files) carry a body in the archive.
    this._isVoid = header.type !== 'file' && header.type !== 'contiguous-file'
    this._finished = false
    this._pack = pack
    this._openCallback = null

    // Become the active entry, or queue behind the current one.
    if (this._pack._stream === null) this._pack._stream = this
    else this._pack._pending.push(this)
  }

  _open (cb) {
    // Defer opening until this sink is the pack's active entry.
    this._openCallback = cb
    if (this._pack._stream === this) this._continueOpen()
  }

  // Invoke the user's entry callback exactly once.
  _continuePack (err) {
    if (this._callback === null) return

    const callback = this._callback
    this._callback = null

    callback(err)
  }

  // Activate this sink: emit its header (unless the header is completed
  // from the body, as with linkname-from-body symlinks) and, for bodiless
  // entries, finish immediately.
  _continueOpen () {
    if (this._pack._stream === null) this._pack._stream = this

    const cb = this._openCallback
    this._openCallback = null
    if (cb === null) return

    if (this._pack.destroying) return cb(new Error('pack stream destroyed'))
    if (this._pack._finalized) return cb(new Error('pack stream is already finalized'))

    this._pack._stream = this

    if (!this._isLinkname) {
      this._pack._encode(this.header)
    }

    if (this._isVoid) {
      this._finish()
      this._continuePack(null)
    }

    cb(null)
  }

  _write (data, cb) {
    if (this._isLinkname) {
      // Body bytes are the symlink target, not archive data.
      this._linkname = this._linkname ? b4a.concat([this._linkname, data]) : data
      return cb(null)
    }

    if (this._isVoid) {
      if (data.byteLength > 0) {
        return cb(new Error('No body allowed for this entry'))
      }
      return cb()
    }

    this.written += data.byteLength
    // Propagate the pack's backpressure by withholding the write callback
    // until the pack's reader drains.
    if (this._pack.push(data)) return cb()
    this._pack._drain = cb
  }

  // Emit any deferred header, pad the body to a 512-byte boundary, and
  // release the pack for the next pending entry. Idempotent.
  _finish () {
    if (this._finished) return
    this._finished = true

    if (this._isLinkname) {
      this.header.linkname = this._linkname ? b4a.toString(this._linkname, 'utf-8') : ''
      this._pack._encode(this.header)
    }

    overflow(this._pack, this.header.size)

    this._pack._done(this)
  }

  _final (cb) {
    // A short or long body would corrupt every subsequent entry offset.
    if (this.written !== this.header.size) { // corrupting tar
      return cb(new Error('Size mismatch'))
    }

    this._finish()
    cb(null)
  }

  _getError () {
    return getStreamError(this) || new Error('tar entry destroyed')
  }

  _predestroy () {
    // An aborted entry invalidates the whole archive.
    this._pack.destroy(this._getError())
  }

  _destroy (cb) {
    this._pack._done(this)

    // Report success only if the entry actually completed.
    this._continuePack(this._finished ? null : this._getError())

    cb()
  }
}
|
||||
|
||||
// Readable stream producing a tar archive. Entries are added with
// entry(); their Sinks are serialised so bodies are emitted in order.
// Call finalize() after the last entry to emit the end-of-archive blocks.
class Pack extends Readable {
  constructor (opts) {
    super(opts)
    this._drain = noop // pending Sink write callback awaiting downstream drain
    this._finalized = false
    this._finalizing = false // finalize() requested while entries were pending
    this._pending = [] // Sinks queued behind the active one
    this._stream = null // currently active Sink
  }

  // Add one entry. `buffer` (optional) is the full body; otherwise write
  // the body to the returned Sink. `callback` fires when the entry has
  // been fully emitted (or on error). NOTE: `header` is mutated in place
  // (defaults are filled in, and size is set from `buffer`).
  entry (header, buffer, callback) {
    if (this._finalized || this.destroying) throw new Error('already finalized or destroyed')

    if (typeof buffer === 'function') {
      callback = buffer
      buffer = null
    }

    if (!callback) callback = noop

    // Fill in header defaults. Symlink targets live in the linkname
    // field (or the body -- see Sink), never in the entry size.
    if (!header.size || header.type === 'symlink') header.size = 0
    if (!header.type) header.type = modeToType(header.mode)
    if (!header.mode) header.mode = header.type === 'directory' ? DMODE : FMODE
    if (!header.uid) header.uid = 0
    if (!header.gid) header.gid = 0
    if (!header.mtime) header.mtime = new Date()

    if (typeof buffer === 'string') buffer = b4a.from(buffer)

    const sink = new Sink(this, header, callback)

    if (b4a.isBuffer(buffer)) {
      // Entire body supplied up front: write it and close the sink.
      header.size = buffer.byteLength
      sink.write(buffer)
      sink.end()
    }

    return sink
  }

  // Finish the archive: emits two 512-byte zero blocks and ends the
  // stream. Deferred automatically until all pending entries complete.
  finalize () {
    if (this._stream || this._pending.length > 0) {
      this._finalizing = true
      return
    }

    if (this._finalized) return
    this._finalized = true

    this.push(END_OF_TAR)
    this.push(null)
  }

  // Called by the active Sink when it completes or is destroyed:
  // hand the pack over to the next pending entry (or finalize).
  _done (stream) {
    if (stream !== this._stream) return

    this._stream = null

    if (this._finalizing) this.finalize()
    if (this._pending.length) this._pending.shift()._continueOpen()
  }

  // Emit a header block; falls back to a pax header when the plain USTAR
  // encoding cannot represent it (headers.encode returns null).
  _encode (header) {
    if (!header.pax) {
      const buf = headers.encode(header)
      if (buf) {
        this.push(buf)
        return
      }
    }
    this._encodePax(header)
  }

  // Emit a pax extended header (its own header block + record body +
  // padding) followed by the real entry header.
  _encodePax (header) {
    const paxHeader = headers.encodePax({
      name: header.name,
      linkname: header.linkname,
      pax: header.pax
    })

    const newHeader = {
      name: 'PaxHeader',
      mode: header.mode,
      uid: header.uid,
      gid: header.gid,
      size: paxHeader.byteLength,
      mtime: header.mtime,
      type: 'pax-header',
      linkname: header.linkname && 'PaxHeader',
      uname: header.uname,
      gname: header.gname,
      devmajor: header.devmajor,
      devminor: header.devminor
    }

    this.push(headers.encode(newHeader))
    this.push(paxHeader)
    overflow(this, paxHeader.byteLength)

    // Reuse the scratch header for the actual entry.
    newHeader.size = header.size
    newHeader.type = header.type
    this.push(headers.encode(newHeader))
  }

  // Release the Sink write callback that was parked waiting for drain.
  _doDrain () {
    const drain = this._drain
    this._drain = noop
    drain()
  }

  _predestroy () {
    const err = getStreamError(this)

    if (this._stream) this._stream.destroy(err)

    while (this._pending.length) {
      const stream = this._pending.shift()
      stream.destroy(err)
      stream._continueOpen()
    }

    this._doDrain()
  }

  _read (cb) {
    this._doDrain()
    cb()
  }
}
|
||||
|
||||
module.exports = function pack (opts) {
|
||||
return new Pack(opts)
|
||||
}
|
||||
|
||||
// Derive the tar entry type from the file-type bits of a POSIX mode;
// anything unrecognised is treated as a regular file.
function modeToType (mode) {
  const fileType = mode & constants.S_IFMT
  if (fileType === constants.S_IFBLK) return 'block-device'
  if (fileType === constants.S_IFCHR) return 'character-device'
  if (fileType === constants.S_IFDIR) return 'directory'
  if (fileType === constants.S_IFIFO) return 'fifo'
  if (fileType === constants.S_IFLNK) return 'symlink'
  return 'file'
}
|
||||
|
||||
function noop () {}
|
||||
|
||||
// Pad the archive out to the next 512-byte block boundary by pushing the
// required number of zero bytes (no-op when already aligned).
function overflow (self, size) {
  const remainder = size & 511
  if (remainder === 0) return
  self.push(END_OF_TAR.subarray(0, 512 - remainder))
}
|
||||
|
||||
function mapWritable (buf) {
|
||||
return b4a.isBuffer(buf) ? buf : b4a.from(buf)
|
||||
}
|
||||
29
resources/app/node_modules/tar-stream/package.json
generated
vendored
Normal file
29
resources/app/node_modules/tar-stream/package.json
generated
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"name": "tar-stream",
|
||||
"version": "3.1.7",
|
||||
"description": "tar-stream is a streaming tar parser and generator and nothing else. It operates purely using streams which means you can easily extract/parse tarballs without ever hitting the file system.",
|
||||
"main": "index.js",
|
||||
"files": [
|
||||
"*.js"
|
||||
],
|
||||
"browser": {
|
||||
"fs": false
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/mafintosh/tar-stream.git"
|
||||
},
|
||||
"author": "Mathias Buus <mathiasbuus@gmail.com>",
|
||||
"license": "MIT",
|
||||
"homepage": "https://github.com/mafintosh/tar-stream",
|
||||
"dependencies": {
|
||||
"b4a": "^1.6.4",
|
||||
"fast-fifo": "^1.2.0",
|
||||
"streamx": "^2.15.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"brittle": "^3.3.2",
|
||||
"concat-stream": "^2.0.0",
|
||||
"standard": "^17.0.1"
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user