Commit 8278dd9d authored by 智慧

feat: add runtime.js script as a plugin; add a click event, binding a button to the sprite

parent 5f340bdd
../mkdirp/bin/cmd.js
\ No newline at end of file
{
"predef": [ ]
, "bitwise": false
, "camelcase": false
, "curly": false
, "eqeqeq": false
, "forin": false
, "immed": false
, "latedef": false
, "noarg": true
, "noempty": true
, "nonew": true
, "plusplus": false
, "quotmark": true
, "regexp": false
, "undef": true
, "unused": true
, "strict": false
, "trailing": true
, "maxlen": 120
, "asi": true
, "boss": true
, "debug": true
, "eqnull": true
, "esnext": true
, "evil": true
, "expr": true
, "funcscope": false
, "globalstrict": false
, "iterator": false
, "lastsemic": true
, "laxbreak": true
, "laxcomma": true
, "loopfunc": true
, "multistr": false
, "onecase": false
, "proto": false
, "regexdash": false
, "scripturl": true
, "smarttabs": false
, "shadow": false
, "sub": true
, "supernew": false
, "validthis": true
, "browser": true
, "couch": false
, "devel": false
, "dojo": false
, "mootools": false
, "node": true
, "nonstandard": true
, "prototypejs": false
, "rhino": false
, "worker": true
, "wsh": false
, "nomen": false
, "onevar": false
, "passfail": false
}
\ No newline at end of file
sudo: false
language: node_js
node_js:
- '6'
- '8'
- '10'
- '12'
- '14'
- lts/*
notifications:
email:
- rod@vagg.org
- matteo.collina@gmail.com
The MIT License (MIT)
=====================
Copyright (c) 2013-2016 bl contributors
----------------------------------
*bl contributors listed at <https://github.com/rvagg/bl#contributors>*
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
var DuplexStream = require('readable-stream/duplex')
, util = require('util')
, Buffer = require('safe-buffer').Buffer
function BufferList (callback) {
if (!(this instanceof BufferList))
return new BufferList(callback)
this._bufs = []
this.length = 0
if (typeof callback == 'function') {
this._callback = callback
var piper = function piper (err) {
if (this._callback) {
this._callback(err)
this._callback = null
}
}.bind(this)
this.on('pipe', function onPipe (src) {
src.on('error', piper)
})
this.on('unpipe', function onUnpipe (src) {
src.removeListener('error', piper)
})
} else {
this.append(callback)
}
DuplexStream.call(this)
}
util.inherits(BufferList, DuplexStream)
BufferList.prototype._offset = function _offset (offset) {
var tot = 0, i = 0, _t
if (offset === 0) return [ 0, 0 ]
for (; i < this._bufs.length; i++) {
_t = tot + this._bufs[i].length
if (offset < _t || i == this._bufs.length - 1)
return [ i, offset - tot ]
tot = _t
}
}
BufferList.prototype.append = function append (buf) {
var i = 0
if (Buffer.isBuffer(buf)) {
this._appendBuffer(buf);
} else if (Array.isArray(buf)) {
for (; i < buf.length; i++)
this.append(buf[i])
} else if (buf instanceof BufferList) {
// unwrap argument into individual BufferLists
for (; i < buf._bufs.length; i++)
this.append(buf._bufs[i])
} else if (buf != null) {
// coerce number arguments to strings, since Buffer(number) does
// uninitialized memory allocation
if (typeof buf == 'number')
buf = buf.toString()
this._appendBuffer(Buffer.from(buf));
}
return this
}
BufferList.prototype._appendBuffer = function appendBuffer (buf) {
this._bufs.push(buf)
this.length += buf.length
}
BufferList.prototype._write = function _write (buf, encoding, callback) {
this._appendBuffer(buf)
if (typeof callback == 'function')
callback()
}
BufferList.prototype._read = function _read (size) {
if (!this.length)
return this.push(null)
size = Math.min(size, this.length)
this.push(this.slice(0, size))
this.consume(size)
}
BufferList.prototype.end = function end (chunk) {
DuplexStream.prototype.end.call(this, chunk)
if (this._callback) {
this._callback(null, this.slice())
this._callback = null
}
}
BufferList.prototype.get = function get (index) {
return this.slice(index, index + 1)[0]
}
BufferList.prototype.slice = function slice (start, end) {
if (typeof start == 'number' && start < 0)
start += this.length
if (typeof end == 'number' && end < 0)
end += this.length
return this.copy(null, 0, start, end)
}
BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) {
if (typeof srcStart != 'number' || srcStart < 0)
srcStart = 0
if (typeof srcEnd != 'number' || srcEnd > this.length)
srcEnd = this.length
if (srcStart >= this.length)
return dst || Buffer.alloc(0)
if (srcEnd <= 0)
return dst || Buffer.alloc(0)
var copy = !!dst
, off = this._offset(srcStart)
, len = srcEnd - srcStart
, bytes = len
, bufoff = (copy && dstStart) || 0
, start = off[1]
, l
, i
// copy/slice everything
if (srcStart === 0 && srcEnd == this.length) {
if (!copy) { // slice, but full concat if multiple buffers
return this._bufs.length === 1
? this._bufs[0]
: Buffer.concat(this._bufs, this.length)
}
// copy, need to copy individual buffers
for (i = 0; i < this._bufs.length; i++) {
this._bufs[i].copy(dst, bufoff)
bufoff += this._bufs[i].length
}
return dst
}
// easy, cheap case where it's a subset of one of the buffers
if (bytes <= this._bufs[off[0]].length - start) {
return copy
? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
: this._bufs[off[0]].slice(start, start + bytes)
}
if (!copy) // a slice, we need something to copy in to
dst = Buffer.allocUnsafe(len)
for (i = off[0]; i < this._bufs.length; i++) {
l = this._bufs[i].length - start
if (bytes > l) {
this._bufs[i].copy(dst, bufoff, start)
bufoff += l
} else {
this._bufs[i].copy(dst, bufoff, start, start + bytes)
bufoff += l
break
}
bytes -= l
if (start)
start = 0
}
// safeguard so that we don't return uninitialized memory
if (dst.length > bufoff) return dst.slice(0, bufoff)
return dst
}
BufferList.prototype.shallowSlice = function shallowSlice (start, end) {
start = start || 0
end = end || this.length
if (start < 0)
start += this.length
if (end < 0)
end += this.length
var startOffset = this._offset(start)
, endOffset = this._offset(end)
, buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1)
if (endOffset[1] == 0)
buffers.pop()
else
buffers[buffers.length-1] = buffers[buffers.length-1].slice(0, endOffset[1])
if (startOffset[1] != 0)
buffers[0] = buffers[0].slice(startOffset[1])
return new BufferList(buffers)
}
BufferList.prototype.toString = function toString (encoding, start, end) {
return this.slice(start, end).toString(encoding)
}
BufferList.prototype.consume = function consume (bytes) {
// first, normalize the argument, in accordance with how Buffer does it
bytes = Math.trunc(bytes)
// do nothing if not a positive number
if (Number.isNaN(bytes) || bytes <= 0) return this
while (this._bufs.length) {
if (bytes >= this._bufs[0].length) {
bytes -= this._bufs[0].length
this.length -= this._bufs[0].length
this._bufs.shift()
} else {
this._bufs[0] = this._bufs[0].slice(bytes)
this.length -= bytes
break
}
}
return this
}
BufferList.prototype.duplicate = function duplicate () {
var i = 0
, copy = new BufferList()
for (; i < this._bufs.length; i++)
copy.append(this._bufs[i])
return copy
}
BufferList.prototype.destroy = function destroy () {
this._bufs.length = 0
this.length = 0
this.push(null)
}
;(function () {
var methods = {
'readDoubleBE' : 8
, 'readDoubleLE' : 8
, 'readFloatBE' : 4
, 'readFloatLE' : 4
, 'readInt32BE' : 4
, 'readInt32LE' : 4
, 'readUInt32BE' : 4
, 'readUInt32LE' : 4
, 'readInt16BE' : 2
, 'readInt16LE' : 2
, 'readUInt16BE' : 2
, 'readUInt16LE' : 2
, 'readInt8' : 1
, 'readUInt8' : 1
}
for (var m in methods) {
(function (m) {
BufferList.prototype[m] = function (offset) {
return this.slice(offset, offset + methods[m])[m](0)
}
}(m))
}
}())
module.exports = BufferList
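A minimal usage sketch of the BufferList API implemented above, assuming the module is consumed as the 'bl' package:
var BufferList = require('bl')  // assumed package name
var bl = new BufferList()
bl.append(Buffer.from('abcd'))            // append a single Buffer
bl.append([ Buffer.from('ef'), 'gh' ])    // arrays, strings and nested BufferLists are accepted too
console.log(bl.length)                    // 8
console.log(bl.toString('utf8'))          // 'abcdefgh'
console.log(bl.slice(2, 6).toString())    // 'cdef' - slicing may copy across internal buffers
bl.consume(4)                             // drop the first 4 bytes
console.log(bl.toString())                // 'efgh'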
function allocUnsafe (size) {
if (typeof size !== 'number') {
throw new TypeError('"size" argument must be a number')
}
if (size < 0) {
throw new RangeError('"size" argument must not be negative')
}
if (Buffer.allocUnsafe) {
return Buffer.allocUnsafe(size)
} else {
return new Buffer(size)
}
}
module.exports = allocUnsafe
var bufferFill = require('buffer-fill')
var allocUnsafe = require('buffer-alloc-unsafe')
module.exports = function alloc (size, fill, encoding) {
if (typeof size !== 'number') {
throw new TypeError('"size" argument must be a number')
}
if (size < 0) {
throw new RangeError('"size" argument must not be negative')
}
if (Buffer.alloc) {
return Buffer.alloc(size, fill, encoding)
}
var buffer = allocUnsafe(size)
if (size === 0) {
return buffer
}
if (fill === undefined) {
return bufferFill(buffer, 0)
}
if (typeof encoding !== 'string') {
encoding = undefined
}
return bufferFill(buffer, fill, encoding)
}
The MIT License
Copyright (c) 2013 Brian J. Brennan
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
var Buffer = require('buffer').Buffer;
var CRC_TABLE = [
0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419,
0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4,
0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07,
0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de,
0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856,
0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9,
0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4,
0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b,
0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3,
0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac, 0x51de003a,
0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599,
0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190,
0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f,
0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e,
0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01,
0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed,
0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950,
0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3,
0xfbd44c65, 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2,
0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a,
0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5,
0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010,
0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17,
0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6,
0x03b6e20c, 0x74b1d29a, 0xead54739, 0x9dd277af, 0x04db2615,
0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8,
0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344,
0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb,
0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a,
0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5,
0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1,
0xa6bc5767, 0x3fb506dd, 0x48b2364b, 0xd80d2bda, 0xaf0a1b4c,
0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef,
0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe,
0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31,
0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c,
0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713,
0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b,
0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242,
0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1,
0x18b74777, 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c,
0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278,
0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7,
0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66,
0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605,
0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8,
0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b,
0x2d02ef8d
];
if (typeof Int32Array !== 'undefined') {
CRC_TABLE = new Int32Array(CRC_TABLE);
}
function ensureBuffer(input) {
if (Buffer.isBuffer(input)) {
return input;
}
var hasNewBufferAPI =
typeof Buffer.alloc === "function" &&
typeof Buffer.from === "function";
if (typeof input === "number") {
return hasNewBufferAPI ? Buffer.alloc(input) : new Buffer(input);
}
else if (typeof input === "string") {
return hasNewBufferAPI ? Buffer.from(input) : new Buffer(input);
}
else {
throw new Error("input must be buffer, number, or string, received " +
typeof input);
}
}
function bufferizeInt(num) {
var tmp = ensureBuffer(4);
tmp.writeInt32BE(num, 0);
return tmp;
}
function _crc32(buf, previous) {
buf = ensureBuffer(buf);
if (Buffer.isBuffer(previous)) {
previous = previous.readUInt32BE(0);
}
var crc = ~~previous ^ -1;
for (var n = 0; n < buf.length; n++) {
crc = CRC_TABLE[(crc ^ buf[n]) & 0xff] ^ (crc >>> 8);
}
return (crc ^ -1);
}
function crc32() {
return bufferizeInt(_crc32.apply(null, arguments));
}
crc32.signed = function () {
return _crc32.apply(null, arguments);
};
crc32.unsigned = function () {
return _crc32.apply(null, arguments) >>> 0;
};
module.exports = crc32;
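A brief usage sketch of the crc32 export above, assuming it is consumed as the 'buffer-crc32' package:
var crc32 = require('buffer-crc32')  // assumed package name
crc32('hello')            // 4-byte big-endian Buffer containing the checksum
crc32.unsigned('hello')   // the same checksum as an unsigned 32-bit integer
crc32.signed('hello')     // the same checksum as a signed 32-bit integer
// a previous checksum (Buffer or number) can be passed to continue a rolling CRC:
var partial = crc32('hello')
crc32.unsigned(' world', partial)  // equals crc32.unsigned('hello world')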
/* Node.js 6.4.0 and up has full support */
var hasFullSupport = (function () {
try {
if (!Buffer.isEncoding('latin1')) {
return false
}
var buf = Buffer.alloc ? Buffer.alloc(4) : new Buffer(4)
buf.fill('ab', 'ucs2')
return (buf.toString('hex') === '61006200')
} catch (_) {
return false
}
}())
function isSingleByte (val) {
return (val.length === 1 && val.charCodeAt(0) < 256)
}
function fillWithNumber (buffer, val, start, end) {
if (start < 0 || end > buffer.length) {
throw new RangeError('Out of range index')
}
start = start >>> 0
end = end === undefined ? buffer.length : end >>> 0
if (end > start) {
buffer.fill(val, start, end)
}
return buffer
}
function fillWithBuffer (buffer, val, start, end) {
if (start < 0 || end > buffer.length) {
throw new RangeError('Out of range index')
}
if (end <= start) {
return buffer
}
start = start >>> 0
end = end === undefined ? buffer.length : end >>> 0
var pos = start
var len = val.length
while (pos <= (end - len)) {
val.copy(buffer, pos)
pos += len
}
if (pos !== end) {
val.copy(buffer, pos, 0, end - pos)
}
return buffer
}
function fill (buffer, val, start, end, encoding) {
if (hasFullSupport) {
return buffer.fill(val, start, end, encoding)
}
if (typeof val === 'number') {
return fillWithNumber(buffer, val, start, end)
}
if (typeof val === 'string') {
if (typeof start === 'string') {
encoding = start
start = 0
end = buffer.length
} else if (typeof end === 'string') {
encoding = end
end = buffer.length
}
if (encoding !== undefined && typeof encoding !== 'string') {
throw new TypeError('encoding must be a string')
}
if (encoding === 'latin1') {
encoding = 'binary'
}
if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) {
throw new TypeError('Unknown encoding: ' + encoding)
}
if (val === '') {
return fillWithNumber(buffer, 0, start, end)
}
if (isSingleByte(val)) {
return fillWithNumber(buffer, val.charCodeAt(0), start, end)
}
val = new Buffer(val, encoding)
}
if (Buffer.isBuffer(val)) {
return fillWithBuffer(buffer, val, start, end)
}
// Other values (e.g. undefined, boolean, object) results in zero-fill
return fillWithNumber(buffer, 0, start, end)
}
module.exports = fill
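A short sketch of how these two ponyfills are used together, assuming the 'buffer-alloc' and 'buffer-fill' package names:
var alloc = require('buffer-alloc')  // assumed package name
var fill = require('buffer-fill')    // assumed package name
var zeroed = alloc(8)          // 8 zero-filled bytes, even on Node versions without Buffer.alloc
var padded = alloc(8, 'ab')    // <Buffer 61 62 61 62 61 62 61 62>
fill(zeroed, 0x2e, 2, 6)       // fill bytes 2..5 with 0x2e ('.')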
1.5.1 / 2020-05-11
==================
**fixes**
* [[`f516814`](http://github.com/node-modules/compressing/commit/f51681490aeea44a7b27ec0c09d3fb3d0385c5c0)] - fix: index.d.ts streamHeader.name wrong declearing (#46) (shadyzoz <<ShadyZOZ@users.noreply.github.com>>)
1.5.0 / 2019-12-04
==================
**features**
* [[`15c29e9`](http://github.com/node-modules/compressing/commit/15c29e9893880d2c19c343d133edb50f0c55c713)] - feat: zip format support custom fileName encoding (#36) (fengmk2 <<fengmk2@gmail.com>>)
**fixes**
* [[`7d605fe`](http://github.com/node-modules/compressing/commit/7d605fe01a88bc6aab9a2b06a8725545f591bab9)] - fix: typescript error(#29) (Ruanyq <<yiqiang0930@163.com>>)
**others**
* [[`4808fb8`](http://github.com/node-modules/compressing/commit/4808fb8e1d6cbbb31c0e82c359ec04eccb0c1eaf)] - test: add node 11 (#20) (fengmk2 <<fengmk2@gmail.com>>)
1.4.0 / 2018-11-30
==================
**others**
* [[`1f352c8`](http://github.com/node-modules/compressing/commit/1f352c88028acf27c1881fd45d555094cb279c44)] - docs: add index.d.ts and test case (#17) (DiamondYuan <<541832074@qq.com>>)
1.3.2 / 2018-11-21
==================
**fixes**
* [[`3713a0b`](http://github.com/node-modules/compressing/commit/3713a0b8d5b03d61c111afbbd4b6226169afeb14)] - fix: handle error from yazl when file not exists (#19) (DiamondYuan <<541832074@qq.com>>)
1.3.1 / 2018-08-24
==================
**fixes**
* [[`b802819`](http://github.com/node-modules/compressing/commit/b8028195dd6e7200ff47c8f43f695d24838e986b)] - fix: keep stat mode when compress tar or tgz (#11) (Haoliang Gao <<sakura9515@gmail.com>>)
1.3.0 / 2018-08-13
==================
**features**
* [[`04feafa`](http://github.com/node-modules/compressing/commit/04feafa6a290d877044ed162ca4c7dcdc5e54e87)] - feat: support absolute path zip file (#10) (fengmk2 <<fengmk2@gmail.com>>)
1.2.4 / 2018-07-13
==================
* chore: replace multipipe with pump (#9)
1.2.3 / 2017-07-27
==================
* fix: should resolve when all fileWriteStream finished (#7)
1.2.2 / 2017-07-06
==================
* fix: make file mode correct (#6)
1.2.1 / 2017-07-01
==================
* test: fix test on Windows (#4)
1.2.0 / 2017-07-01
==================
* feat: add strip option when uncompress zip
1.1.0 / 2017-02-14
==================
* feat: uncompress (#2)
1.0.0 / 2016-12-24
==================
* rename to compressing
* feat: 1st implementation
* init
MIT License
Copyright (c) 2017-present node-modules and other contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
import { ReadStream, WriteStream } from 'fs'
type sourceType = string | Buffer | ReadStream
type destType = string | WriteStream
interface streamEntryOpts {
relativePath?: string
ignoreBase?: boolean
size?: number
}
interface streamHeader {
type: 'file' | 'directory',
name: string
}
export namespace gzip {
function compressFile(source: sourceType, dest: destType, opts?: any): Promise<void>
function uncompress(source: sourceType, dest: destType, opts?: any): Promise<void>
export class FileStream extends ReadStream {
constructor(opts?: {
zlib?: object,
source: sourceType
});
}
export class UncompressStream extends WriteStream {
constructor(opts?: {
zlib?: object,
source: sourceType
});
on(event: string, listener: (...args: any[]) => void): this
on(event: 'error', listener: (err: Error) => void): this
}
}
export namespace tar {
function compressFile(source: sourceType, dest: destType, opts?: any): Promise<void>
function compressDir(source: sourceType, dest: destType, opts?: any): Promise<void>
function uncompress(source: sourceType, dest: string, opts?: any): Promise<void>
export class Stream extends ReadStream {
constructor();
addEntry(entry: string, opts?: streamEntryOpts): void
addEntry(entry: Buffer | ReadStream, opts: streamEntryOpts): void
}
export class FileStream extends ReadStream {
constructor(opts?: {
relativePath?: string,
size?: number,
suppressSizeWarning?: boolean,
source?: sourceType
});
}
export class UncompressStream extends WriteStream {
constructor(opts?: {
source: sourceType
});
on(event: string, listener: (...args: any[]) => void): this
on(event: 'entry', listener: (header: streamHeader, stream: WriteStream, next: () => void) => void): this
on(event: 'finish', listener: () => void): this
on(event: 'error', listener: (err: Error) => void): this
}
}
export namespace tgz {
function compressFile(source: sourceType, dest: destType, opts?: any): Promise<void>
function compressDir(source: sourceType, dest: destType, opts?: any): Promise<void>
function uncompress(source: sourceType, dest: string, opts?: any): Promise<void>
export class Stream extends ReadStream {
constructor();
addEntry(entry: string, opts?: streamEntryOpts): void
addEntry(entry: Buffer | ReadStream, opts: streamEntryOpts): void
}
export class FileStream extends ReadStream {
constructor(opts?: {
relativePath?: string,
size?: number,
suppressSizeWarning?: boolean,
zlib?: object,
source?: sourceType
});
}
export class UncompressStream extends WriteStream {
constructor(opts?: {
source?: sourceType,
strip?: number
});
on(event: string, listener: (...args: any[]) => void): this
on(event: 'entry', listener: (header: streamHeader, stream: WriteStream, next: () => void) => void): this
on(event: 'finish', listener: () => void): this
on(event: 'error', listener: (err: Error) => void): this
}
}
export namespace zip {
function compressFile(source: sourceType, dest: destType, opts?: any): Promise<void>
function compressDir(source: sourceType, dest: destType, opts?: any): Promise<void>
function uncompress(source: sourceType, dest: string, opts?: any): Promise<void>
export class Stream extends ReadStream {
constructor();
addEntry(entry: string, opts?: streamEntryOpts): void
addEntry(entry: Buffer | ReadStream, opts: streamEntryOpts): void
}
export class FileStream extends ReadStream {
/**
* If opts.source is a file path, opts.relativePath is optional, otherwise it's required.
*
* @param opts
*/
constructor(opts?: {
relativePath?: string,
yazl?: object,
source: string
} | {
relativePath: string,
yazl?: object,
source?: Buffer | ReadStream
});
}
export class UncompressStream extends WriteStream {
constructor(opts?: {
source?: sourceType,
strip?: number,
zipFileNameEncoding?: string
});
on(event: string, listener: (...args: any[]) => void): this
on(event: 'entry', listener: (header: streamHeader, stream: WriteStream, next: () => void) => void): this
on(event: 'finish', listener: () => void): this
on(event: 'error', listener: (err: Error) => void): this
}
}
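A minimal sketch of how the declarations above are typically consumed (the file names here are placeholders):
const compressing = require('compressing');
// compress a directory into a .tgz and a single file into a .gz
compressing.tgz.compressDir('src-dir', 'out.tgz')
  .then(() => compressing.gzip.compressFile('file.txt', 'file.txt.gz'))
  .catch(console.error);
// uncompress a zip into a directory, stripping the top-level folder from entry names
compressing.zip.uncompress('archive.zip', 'dest-dir', { strip: 1 })
  .catch(console.error);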
'use strict';
exports.zip = require('./lib/zip');
exports.gzip = require('./lib/gzip');
exports.tar = require('./lib/tar');
exports.tgz = require('./lib/tgz');
'use strict';
const stream = require('stream');
class BaseStream extends stream.Readable {
addEntry(/* entry, opts */) {
throw new Error('.addEntry not implemented in sub class!');
}
_read() {}
emit(event, data) {
if (event === 'error') {
const error = data;
if (error.name === 'Error') {
error.name = this.constructor.name + 'Error';
}
}
super.emit(event, data);
}
}
module.exports = BaseStream;
'use strict';
const stream = require('stream');
class UncompressBaseStream extends stream.Writable {
emit(event, data) {
if (event === 'error') {
const error = data;
if (error.name === 'Error') {
error.name = this.constructor.name + 'Error';
}
}
super.emit.apply(this, arguments);
}
}
module.exports = UncompressBaseStream;
'use strict';
const fs = require('fs');
const zlib = require('zlib');
const utils = require('../utils');
const streamifier = require('streamifier');
class GzipFileStream extends zlib.Gzip {
constructor(opts) {
opts = opts || {};
super(opts.zlib);
const sourceType = utils.sourceType(opts.source);
if (sourceType === 'file') {
const stream = fs.createReadStream(opts.source, opts.fs);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
return;
}
if (sourceType === 'buffer') {
const stream = streamifier.createReadStream(opts.source, opts.streamifier);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
return;
}
if (sourceType === 'stream') {
opts.source.on('error', err => this.emit('error', err));
opts.source.pipe(this);
}
// else undefined: do nothing
}
}
module.exports = GzipFileStream;
'use strict';
const utils = require('../utils');
const GzipFileStream = require('./file_stream');
const GzipUncompressStream = require('./uncompress_stream');
exports.FileStream = GzipFileStream;
exports.UncompressStream = GzipUncompressStream;
exports.compressFile = utils.makeFileProcessFn(GzipFileStream);
exports.uncompress = utils.makeFileProcessFn(GzipUncompressStream);
'use strict';
const fs = require('fs');
const zlib = require('zlib');
const utils = require('../utils');
const streamifier = require('streamifier');
class GzipUncompressStream extends zlib.Unzip {
constructor(opts) {
opts = opts || {};
super(opts.zlib);
const sourceType = utils.sourceType(opts.source);
if (sourceType === 'file') {
const stream = fs.createReadStream(opts.source, opts.fs);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
return;
}
if (sourceType === 'buffer') {
const stream = streamifier.createReadStream(opts.source, opts.streamifier);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
return;
}
if (sourceType === 'stream') {
opts.source.on('error', err => this.emit('error', err));
opts.source.pipe(this);
}
// else: waiting to be piped
}
}
module.exports = GzipUncompressStream;
'use strict';
const fs = require('fs');
const path = require('path');
const stream = require('stream');
const tar = require('tar-stream');
const utils = require('../utils');
const ready = require('get-ready');
class TarFileStream extends stream.Transform {
constructor(opts) {
super(opts);
const pack = tar.pack();
pack.on('data', chunk => this.push(chunk));
pack.on('end', () => this.ready(true));
const sourceType = utils.sourceType(opts.source);
if (sourceType === 'file') {
// stat file to get file size
fs.stat(opts.source, (err, stat) => {
if (err) return this.emit('error', err);
this.entry = pack.entry({ name: opts.relativePath || path.basename(opts.source), size: stat.size, mode: stat.mode & 0o777 }, err => {
if (err) return this.emit('error', err);
pack.finalize();
});
const stream = fs.createReadStream(opts.source, opts.fs);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
});
} else if (sourceType === 'buffer') {
if (!opts.relativePath) return this.emit('error', new Error('opts.relativePath is required if opts.source is a buffer'));
pack.entry({ name: opts.relativePath }, opts.source);
pack.finalize();
this.end();
} else { // stream or undefined
if (!opts.relativePath) return process.nextTick(() => this.emit('error', new Error('opts.relativePath is required')));
if (opts.size) {
this.entry = pack.entry({ name: opts.relativePath, size: opts.size }, err => {
if (err) return this.emit('error', err);
pack.finalize();
});
} else {
if (!opts.suppressSizeWarning) {
console.warn('You should specify the size of streaming data by opts.size to prevent all streaming data from loading into memory. If you are sure about the memory cost, pass opts.suppressSizeWarning: true to suppress this warning');
}
const buf = [];
this.entry = new stream.Writable({
write(chunk, _, callback) {
buf.push(chunk);
callback();
},
});
this.entry.on('finish', () => {
pack.entry({ name: opts.relativePath }, Buffer.concat(buf));
pack.finalize();
});
}
if (sourceType === 'stream') {
opts.source.on('error', err => this.emit('error', err));
opts.source.pipe(this);
}
}
}
_transform(chunk, encoding, callback) {
if (this.entry) {
this.entry.write(chunk, encoding, callback);
}
}
_flush(callback) {
if (this.entry) {
this.entry.end();
}
this.ready(callback);
}
}
ready.mixin(TarFileStream.prototype);
module.exports = TarFileStream;
'use strict';
const utils = require('../utils');
const TarStream = require('./stream');
const TarFileStream = require('./file_stream');
const TarUncompressStream = require('./uncompress_stream');
exports.Stream = TarStream;
exports.FileStream = TarFileStream;
exports.UncompressStream = TarUncompressStream;
exports.compressDir = utils.makeCompressDirFn(TarStream);
exports.compressFile = utils.makeFileProcessFn(TarFileStream);
exports.uncompress = utils.makeUncompressFn(TarUncompressStream);
'use strict';
const fs = require('fs');
const path = require('path');
const stream = require('stream');
const tar = require('tar-stream');
const utils = require('../utils');
const BaseStream = require('../base_stream');
class TarStream extends BaseStream {
constructor(opts) {
super(opts);
this._waitingEntries = [];
this._processing = false;
this._init(opts);
}
_init() {
const pack = this._pack = tar.pack();
pack.on('end', () => this.push(null));
pack.on('data', chunk => this.push(chunk));
pack.on('error', err => this.emit('error', err));
}
addEntry(entry, opts) {
if (this._processing) {
return this._waitingEntries.push([ entry, opts ]);
}
opts = opts || {};
this._processing = true;
const entryType = utils.entryType(entry);
if (!entryType) return; // TODO
if (entryType === 'fileOrDir') {
this._addFileOrDirEntry(entry, opts);
} else if (entryType === 'buffer') {
this._addBufferEntry(entry, opts);
} else { // stream
this._addStreamEntry(entry, opts);
}
}
_addFileOrDirEntry(entry, opts) {
fs.stat(entry, (err, stat) => {
if (err) return this.emit('error', err);
if (stat.isDirectory()) return this._addDirEntry(entry, opts);
if (stat.isFile()) return this._addFileEntry(entry, opts);
const illigalEntryError = new Error('Type is not supported, must be a file path, directory path, file buffer, or a readable stream');
illigalEntryError.name = 'IlligalEntryError';
this.emit('error', illigalEntryError);
});
}
_addFileEntry(entry, opts) {
// stat file to get file size
fs.stat(entry, (err, stat) => {
if (err) return this.emit('error', err);
const entryStream = this._pack.entry({ name: opts.relativePath || path.basename(entry), size: stat.size, mode: stat.mode & 0o777 }, this._onEntryFinish.bind(this));
const stream = fs.createReadStream(entry, opts.fs);
stream.on('error', err => this.emit('error', err));
stream.pipe(entryStream);
});
}
_addDirEntry(entry, opts) {
fs.readdir(entry, (err, files) => {
if (err) return this.emit('error', err);
const relativePath = opts.relativePath || '';
files.forEach(fileOrDir => {
const newOpts = utils.clone(opts);
if (opts.ignoreBase) {
newOpts.relativePath = path.join(relativePath, fileOrDir);
} else {
newOpts.relativePath = path.join(relativePath, path.basename(entry), fileOrDir);
}
newOpts.ignoreBase = true;
this.addEntry(path.join(entry, fileOrDir), newOpts);
});
this._onEntryFinish();
});
}
_addBufferEntry(entry, opts) {
if (!opts.relativePath) return this.emit('error', new Error('opts.relativePath is required if entry is a buffer'));
this._pack.entry({ name: opts.relativePath }, entry, this._onEntryFinish.bind(this));
}
_addStreamEntry(entry, opts) {
entry.on('error', err => this.emit('error', err));
if (!opts.relativePath) return this.emit('error', new Error('opts.relativePath is required'));
if (opts.size) {
const entryStream = this._pack.entry({ name: opts.relativePath, size: opts.size }, this._onEntryFinish.bind(this));
entry.pipe(entryStream);
} else {
if (!opts.suppressSizeWarning) {
console.warn('You should specify the size of streaming data by opts.size to prevent all streaming data from loading into memory. If you are sure about the memory cost, pass opts.suppressSizeWarning: true to suppress this warning');
}
const buf = [];
const collectStream = new stream.Writable({
write(chunk, _, callback) {
buf.push(chunk);
callback();
},
});
collectStream.on('error', err => this.emit('error', err));
collectStream.on('finish', () => {
this._pack.entry({ name: opts.relativePath }, Buffer.concat(buf), this._onEntryFinish.bind(this));
});
entry.pipe(collectStream);
}
}
_read() {}
_onEntryFinish(err) {
if (err) return this.emit('error', err);
this._processing = false;
const waitingEntry = this._waitingEntries.shift();
if (waitingEntry) {
this.addEntry.apply(this, waitingEntry);
} else {
this._finalize();
}
}
_finalize() {
this._pack.finalize();
}
}
module.exports = TarStream;
'use strict';
const fs = require('fs');
const tar = require('tar-stream');
const utils = require('../utils');
const streamifier = require('streamifier');
// stream.Writable
class TarUncompressStream extends tar.extract {
constructor(opts) {
opts = opts || {};
super(opts);
const sourceType = utils.sourceType(opts.source);
if (sourceType === 'file') {
const stream = fs.createReadStream(opts.source, opts.fs);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
return;
}
if (sourceType === 'buffer') {
const stream = streamifier.createReadStream(opts.source, opts.streamifier);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
return;
}
if (sourceType === 'stream') {
opts.source.on('error', err => this.emit('error', err));
opts.source.pipe(this);
}
// else: waiting to be piped
}
}
module.exports = TarUncompressStream;
'use strict';
const tar = require('../tar');
const gzip = require('../gzip');
const utils = require('../utils');
const stream = require('stream');
const pump = require('pump');
const ready = require('get-ready');
class TgzFileStream extends stream.Transform {
constructor(opts) {
opts = opts || {};
super(opts);
const sourceType = this._sourceType = utils.sourceType(opts.source);
const tarStream = this._tarStream = new tar.FileStream(opts);
opts = utils.clone(opts);
delete opts.source;
const gzipStream = new gzip.FileStream(opts);
gzipStream.on('data', chunk => {
this.push(chunk);
});
gzipStream.on('end', () => this.ready(true));
pump(tarStream, gzipStream, err => {
err && this.emit('error', err);
});
if (sourceType !== 'stream' && sourceType !== undefined) {
this.end();
}
}
_transform(chunk, encoding, callback) {
this._tarStream.write(chunk, encoding, callback);
}
_flush(callback) {
if (this._sourceType === 'stream' || this._sourceType === undefined) {
this._tarStream.end();
}
this.ready(callback);
}
}
ready.mixin(TgzFileStream.prototype);
module.exports = TgzFileStream;
'use strict';
const utils = require('../utils');
const TgzStream = require('./stream');
const TgzFileStream = require('./file_stream');
const TgzUncompressStream = require('./uncompress_stream');
exports.Stream = TgzStream;
exports.FileStream = TgzFileStream;
exports.UncompressStream = TgzUncompressStream;
exports.compressDir = utils.makeCompressDirFn(TgzStream);
exports.compressFile = utils.makeFileProcessFn(TgzFileStream);
exports.uncompress = utils.makeUncompressFn(TgzUncompressStream);
'use strict';
const tar = require('../tar');
const gzip = require('../gzip');
const BaseStream = require('../base_stream');
class TgzStream extends BaseStream {
constructor(opts) {
super(opts);
const tarStream = this._tarStream = new tar.Stream();
tarStream.on('error', err => this.emit('error', err));
const gzipStream = new gzip.FileStream();
gzipStream.on('end', () => this.push(null));
gzipStream.on('data', chunk => this.push(chunk));
gzipStream.on('error', err => this.emit('error', err));
tarStream.pipe(gzipStream);
}
addEntry(entry, opts) {
this._tarStream.addEntry(entry, opts);
}
}
module.exports = TgzStream;
'use strict';
const fs = require('fs');
const utils = require('../utils');
const ready = require('get-ready');
const streamifier = require('streamifier');
const FlushWritable = require('flushwritable');
const GzipUncompressStream = require('../gzip').UncompressStream;
const TarUncompressStream = require('../tar').UncompressStream;
class TgzUncompressStream extends FlushWritable {
constructor(opts) {
opts = opts || {};
super(opts);
const newOpts = utils.clone(opts);
newOpts.source = undefined;
this._gzipStream = new GzipUncompressStream(newOpts)
.on('error', err => this.emit('error', err));
const tarStream = new TarUncompressStream(newOpts)
.on('finish', () => this.ready(true))
.on('entry', this.emit.bind(this, 'entry'))
.on('error', err => this.emit('error', err));
this._gzipStream.pipe(tarStream);
const sourceType = utils.sourceType(opts.source);
if (sourceType === 'file') {
const stream = fs.createReadStream(opts.source, opts.fs);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
return;
}
if (sourceType === 'buffer') {
const stream = streamifier.createReadStream(opts.source, opts.streamifier);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
return;
}
if (sourceType === 'stream') {
opts.source.on('error', err => this.emit('error', err));
opts.source.pipe(this);
}
// else: waiting to be piped
}
_write(chunk, encoding, callback) {
this._gzipStream.write(chunk, encoding, callback);
}
_flush(callback) {
this._gzipStream.end();
this.ready(callback);
}
}
ready.mixin(TgzUncompressStream.prototype);
module.exports = TgzUncompressStream;
'use strict';
const fs = require('fs');
const path = require('path');
const mkdirp = require('mkdirp');
const pump = require('pump');
// file/fileBuffer/stream
exports.sourceType = source => {
if (!source) return undefined;
if (source instanceof Buffer) return 'buffer';
if (typeof source._read === 'function' || typeof source._transform === 'function') return 'stream';
if (typeof source !== 'string') {
const err = new Error('Type is not supported, must be a file path, file buffer, or a readable stream');
err.name = 'IlligalSourceError';
throw err;
}
return 'file';
};
function destType(dest) {
if (typeof dest._write === 'function' || typeof dest._transform === 'function') return 'stream';
if (typeof dest !== 'string') {
const err = new Error('Type is not supported, must be a file path, or a writable stream');
err.name = 'IlligalDestinationError';
throw err;
}
return 'path';
}
exports.destType = destType;
const illigalEntryError = new Error('Type is not supported, must be a file path, directory path, file buffer, or a readable stream');
illigalEntryError.name = 'IlligalEntryError';
// fileOrDir/fileBuffer/stream
exports.entryType = entry => {
if (!entry) return;
if (entry instanceof Buffer) return 'buffer';
if (typeof entry._read === 'function' || typeof entry._transform === 'function') return 'stream';
if (typeof entry !== 'string') throw illigalEntryError;
return 'fileOrDir';
};
exports.clone = obj => {
const newObj = {};
for (const i in obj) {
newObj[i] = obj[i];
}
return newObj;
};
exports.makeFileProcessFn = StreamClass => {
return (source, dest, opts) => {
opts = opts || {};
opts.source = source;
const destStream = destType(dest) === 'path' ? fs.createWriteStream(dest) : dest;
const compressStream = new StreamClass(opts);
return safePipe([ compressStream, destStream ]);
};
};
exports.makeCompressDirFn = StreamClass => {
return (dir, dest, opts) => {
const destStream = destType(dest) === 'path' ? fs.createWriteStream(dest) : dest;
const compressStream = new StreamClass();
compressStream.addEntry(dir, opts);
return safePipe([ compressStream, destStream ]);
};
};
exports.makeUncompressFn = StreamClass => {
return (source, destDir, opts) => {
opts = opts || {};
opts.source = source;
if (destType(destDir) !== 'path') {
const error = new Error('uncompress destination must be a directory');
error.name = 'IlligalDestError';
throw error;
}
return new Promise((resolve, reject) => {
mkdirp(destDir, err => {
if (err) return reject(err);
let entryCount = 0;
let successCount = 0;
let isFinish = false;
function done() {
// resolve when both stream finish and file write finish
if (isFinish && entryCount === successCount) resolve();
}
new StreamClass(opts)
.on('finish', () => {
isFinish = true;
done();
})
.on('error', reject)
.on('entry', (header, stream, next) => {
stream.on('end', next);
if (header.type === 'file') {
const fullpath = path.join(destDir, header.name);
mkdirp(path.dirname(fullpath), err => {
if (err) return reject(err);
entryCount++;
pump(stream, fs.createWriteStream(fullpath, { mode: header.mode }), err => {
if (err) return reject(err);
successCount++;
done();
});
});
} else { // directory
mkdirp(path.join(destDir, header.name), err => {
if (err) return reject(err);
stream.resume();
});
}
});
});
});
};
};
exports.streamToBuffer = stream => {
return new Promise((resolve, reject) => {
const chunks = [];
stream
.on('readable', () => {
let chunk;
while ((chunk = stream.read())) chunks.push(chunk);
})
.on('end', () => resolve(Buffer.concat(chunks)))
.on('error', err => reject(err));
});
};
function safePipe(streams) {
return new Promise((resolve, reject) => {
pump(streams[0], streams[1], err => {
if (err) return reject(err);
resolve();
});
});
}
exports.safePipe = safePipe;
exports.stripFileName = (strip, fileName, type) => {
// before
// node/package.json
// node/lib/index.js
//
// when strip 1
// package.json
// lib/index.js
//
// when strip 2
// package.json
// index.js
if (Buffer.isBuffer(fileName)) fileName = fileName.toString();
// use / instead of \\
if (fileName.indexOf('\\') !== -1) fileName = fileName.replace(/\\+/g, '/');
// fix absolute path
// /foo => foo
if (fileName[0] === '/') fileName = fileName.replace(/^\/+/, '');
let s = fileName.split('/');
// fix relative path
// foo/../bar/../../asdf/
// => asdf/
if (s.indexOf('..') !== -1) {
fileName = path.normalize(fileName);
// https://npm.taobao.org/mirrors/node/latest/docs/api/path.html#path_path_normalize_path
if (process.platform === 'win32') fileName = fileName.replace(/\\+/g, '/');
// replace '../' on ../../foo/bar
fileName = fileName.replace(/(\.\.\/)+/, '');
if (type === 'directory' && fileName && fileName[fileName.length - 1] !== '/') {
fileName += '/';
}
s = fileName.split('/');
}
strip = Math.min(strip, s.length - 1);
return s.slice(strip).join('/') || '/';
};
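A small sketch of what exports.stripFileName does with the strip levels described in the comments above (the require path is an assumption about where this utils module lives):
const utils = require('compressing/lib/utils');  // assumed module path
utils.stripFileName(0, 'node/lib/index.js', 'file');  // 'node/lib/index.js'
utils.stripFileName(1, 'node/lib/index.js', 'file');  // 'lib/index.js'
utils.stripFileName(2, 'node/lib/index.js', 'file');  // 'index.js'
utils.stripFileName(0, '/etc/passwd', 'file');        // 'etc/passwd' - the leading '/' is removed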
'use strict';
const path = require('path');
const yazl = require('yazl');
const assert = require('assert');
const stream = require('stream');
const utils = require('../utils');
const ready = require('get-ready');
class ZipFileStream extends stream.Transform {
constructor(opts) {
super(opts);
const sourceType = utils.sourceType(opts.source);
const zipfile = new yazl.ZipFile();
const zipStream = zipfile.outputStream;
zipStream.on('data', data => this.push(data));
zipStream.on('end', () => this.ready(true));
zipfile.on('error', err => this.emit('error', err));
if (sourceType !== 'file') {
assert(opts.relativePath, 'opts.relativePath is required when compressing a buffer, or a stream');
}
if (sourceType) {
this.end();
}
if (sourceType === 'file') {
zipfile.addFile(opts.source, opts.relativePath || path.basename(opts.source), opts.yazl);
} else if (sourceType === 'buffer') {
zipfile.addBuffer(opts.source, opts.relativePath, opts.yazl);
} else if (sourceType === 'stream') {
zipfile.addReadStream(opts.source, opts.relativePath, opts.yazl);
} else { // undefined
const passThrough = this._passThrough = new stream.PassThrough();
this.on('finish', () => passThrough.end());
zipfile.addReadStream(passThrough, opts.relativePath, opts.yazl);
}
zipfile.end(opts.yazl);
}
_transform(chunk, encoding, callback) {
if (this._passThrough) {
this._passThrough.write(chunk, encoding, callback);
}
}
_flush(callback) {
this.ready(callback);
}
}
ready.mixin(ZipFileStream.prototype);
module.exports = ZipFileStream;
'use strict';
const utils = require('../utils');
const ZipStream = require('./stream');
const ZipFileStream = require('./file_stream');
const ZipUncompressStream = require('./uncompress_stream');
exports.Stream = ZipStream;
exports.FileStream = ZipFileStream;
exports.UncompressStream = ZipUncompressStream;
exports.compressDir = utils.makeCompressDirFn(ZipStream);
exports.compressFile = utils.makeFileProcessFn(ZipFileStream);
exports.uncompress = utils.makeUncompressFn(ZipUncompressStream);
'use strict';
const path = require('path');
const yazl = require('yazl');
const TarStream = require('../tar/stream');
class ZipStream extends TarStream {
_init() {
const zipfile = this._zipfile = new yazl.ZipFile();
const stream = zipfile.outputStream;
stream.on('end', () => this.push(null));
stream.on('data', chunk => this.push(chunk));
stream.on('error', err => this.emit('error', err));
}
_addFileEntry(entry, opts) {
this._zipfile.addFile(entry, opts.relativePath || path.basename(entry), opts);
this._onEntryFinish();
}
_addBufferEntry(entry, opts) {
if (!opts.relativePath) return this.emit('error', new Error('opts.relativePath is required if entry is a buffer'));
this._zipfile.addBuffer(entry, opts.relativePath, opts);
this._onEntryFinish();
}
_addStreamEntry(entry, opts) {
if (!opts.relativePath) return this.emit('error', new Error('opts.relativePath is required if entry is a stream'));
entry.on('error', err => this.emit('error', err));
this._zipfile.addReadStream(entry, opts.relativePath, opts);
this._onEntryFinish();
}
_finalize() {
this._zipfile.end();
}
}
module.exports = ZipStream;
'use strict';
// https://github.com/thejoshwolfe/yauzl#no-streaming-unzip-api
const yauzl = require('yauzl');
const stream = require('stream');
const UncompressBaseStream = require('../base_write_stream');
const utils = require('../utils');
// lazy load iconv-lite
let iconv;
const YAUZL_CALLBACK = Symbol('ZipUncompressStream#yauzlCallback');
const STRIP_NAME = Symbol('ZipUncompressStream#stripName');
// don't decodeStrings on yauzl, we should handle fileName ourselves
// see validateFileName on https://github.com/thejoshwolfe/yauzl/blob/51010ce4e8c7e6345efe195e1b4150518f37b393/index.js#L607
// - support "absolute path"
const DEFAULTS = { lazyEntries: true, decodeStrings: false };
class ZipUncompressStream extends UncompressBaseStream {
constructor(opts) {
opts = opts || {};
super(opts);
this._chunks = [];
this._strip = Number(opts.strip) || 0;
this._zipFileNameEncoding = opts.zipFileNameEncoding || 'utf8';
if (this._zipFileNameEncoding === 'utf-8') {
this._zipFileNameEncoding = 'utf8';
}
this[YAUZL_CALLBACK] = this[YAUZL_CALLBACK].bind(this);
const sourceType = utils.sourceType(opts.source);
const yauzlOpts = this._yauzlOpts = Object.assign({}, DEFAULTS, opts.yauzl);
if (sourceType === 'file') {
yauzl.open(opts.source, yauzlOpts, this[YAUZL_CALLBACK]);
return;
}
if (sourceType === 'buffer') {
yauzl.fromBuffer(opts.source, yauzlOpts, this[YAUZL_CALLBACK]);
return;
}
if (sourceType === 'stream') {
utils.streamToBuffer(opts.source)
.then(buf => yauzl.fromBuffer(buf, yauzlOpts, this[YAUZL_CALLBACK]))
.catch(e => this.emit('error', e));
return;
}
this.on('pipe', srcStream => {
srcStream.unpipe(this);
utils.streamToBuffer(srcStream)
.then(buf => {
this._chunks.push(buf);
buf = Buffer.concat(this._chunks);
yauzl.fromBuffer(buf, yauzlOpts, this[YAUZL_CALLBACK]);
})
.catch(e => this.emit('error', e));
});
}
_write(chunk) {
// push to the _chunks array; this will only happen once, because the source stream will be unpiped.
this._chunks.push(chunk);
}
[YAUZL_CALLBACK](err, zipFile) {
if (err) return this.emit('error', err);
zipFile.readEntry();
zipFile
.on('entry', entry => {
// fileName is buffer by default because decodeStrings = false
if (Buffer.isBuffer(entry.fileName)) {
if (this._zipFileNameEncoding === 'utf8') {
entry.fileName = entry.fileName.toString();
} else {
if (!iconv) {
iconv = require('iconv-lite');
}
entry.fileName = iconv.decode(entry.fileName, this._zipFileNameEncoding);
}
}
// directory file names end with '/'
const type = /\/$/.test(entry.fileName) ? 'directory' : 'file';
const name = entry.fileName = this[STRIP_NAME](entry.fileName, type);
const header = { name, type, yauzl: entry };
if (type === 'file') {
zipFile.openReadStream(entry, (err, readStream) => {
if (err) return this.emit('error', err);
this.emit('entry', header, readStream, next);
});
} else { // directory
const placeholder = new stream.Readable({ read() {} });
this.emit('entry', header, placeholder, next);
setImmediate(() => placeholder.emit('end'));
}
})
.on('end', () => this.emit('finish'))
.on('error', err => this.emit('error', err));
function next() {
zipFile.readEntry();
}
}
[STRIP_NAME](fileName, type) {
return utils.stripFileName(this._strip, fileName, type);
}
}
module.exports = ZipUncompressStream;
Copyright Node.js contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// NOTE: These type checking functions intentionally don't use `instanceof`
// because it is fragile and can be easily faked with `Object.create()`.
function isArray(arg) {
if (Array.isArray) {
return Array.isArray(arg);
}
return objectToString(arg) === '[object Array]';
}
exports.isArray = isArray;
function isBoolean(arg) {
return typeof arg === 'boolean';
}
exports.isBoolean = isBoolean;
function isNull(arg) {
return arg === null;
}
exports.isNull = isNull;
function isNullOrUndefined(arg) {
return arg == null;
}
exports.isNullOrUndefined = isNullOrUndefined;
function isNumber(arg) {
return typeof arg === 'number';
}
exports.isNumber = isNumber;
function isString(arg) {
return typeof arg === 'string';
}
exports.isString = isString;
function isSymbol(arg) {
return typeof arg === 'symbol';
}
exports.isSymbol = isSymbol;
function isUndefined(arg) {
return arg === void 0;
}
exports.isUndefined = isUndefined;
function isRegExp(re) {
return objectToString(re) === '[object RegExp]';
}
exports.isRegExp = isRegExp;
function isObject(arg) {
return typeof arg === 'object' && arg !== null;
}
exports.isObject = isObject;
function isDate(d) {
return objectToString(d) === '[object Date]';
}
exports.isDate = isDate;
function isError(e) {
return (objectToString(e) === '[object Error]' || e instanceof Error);
}
exports.isError = isError;
function isFunction(arg) {
return typeof arg === 'function';
}
exports.isFunction = isFunction;
function isPrimitive(arg) {
return arg === null ||
typeof arg === 'boolean' ||
typeof arg === 'number' ||
typeof arg === 'string' ||
typeof arg === 'symbol' || // ES6 symbol
typeof arg === 'undefined';
}
exports.isPrimitive = isPrimitive;
exports.isBuffer = Buffer.isBuffer;
function objectToString(o) {
return Object.prototype.toString.call(o);
}
var assert = require('tap');
var t = require('./lib/util');
assert.equal(t.isArray([]), true);
assert.equal(t.isArray({}), false);
assert.equal(t.isBoolean(null), false);
assert.equal(t.isBoolean(true), true);
assert.equal(t.isBoolean(false), true);
assert.equal(t.isNull(null), true);
assert.equal(t.isNull(undefined), false);
assert.equal(t.isNull(false), false);
assert.equal(t.isNull(), false);
assert.equal(t.isNullOrUndefined(null), true);
assert.equal(t.isNullOrUndefined(undefined), true);
assert.equal(t.isNullOrUndefined(false), false);
assert.equal(t.isNullOrUndefined(), true);
assert.equal(t.isNumber(null), false);
assert.equal(t.isNumber('1'), false);
assert.equal(t.isNumber(1), true);
assert.equal(t.isString(null), false);
assert.equal(t.isString('1'), true);
assert.equal(t.isString(1), false);
assert.equal(t.isSymbol(null), false);
assert.equal(t.isSymbol('1'), false);
assert.equal(t.isSymbol(1), false);
assert.equal(t.isSymbol(Symbol()), true);
assert.equal(t.isUndefined(null), false);
assert.equal(t.isUndefined(undefined), true);
assert.equal(t.isUndefined(false), false);
assert.equal(t.isUndefined(), true);
assert.equal(t.isRegExp(null), false);
assert.equal(t.isRegExp('1'), false);
assert.equal(t.isRegExp(new RegExp()), true);
assert.equal(t.isObject({}), true);
assert.equal(t.isObject([]), true);
assert.equal(t.isObject(new RegExp()), true);
assert.equal(t.isObject(new Date()), true);
assert.equal(t.isDate(null), false);
assert.equal(t.isDate('1'), false);
assert.equal(t.isDate(new Date()), true);
assert.equal(t.isError(null), false);
assert.equal(t.isError({ err: true }), false);
assert.equal(t.isError(new Error()), true);
assert.equal(t.isFunction(null), false);
assert.equal(t.isFunction({ }), false);
assert.equal(t.isFunction(function() {}), true);
assert.equal(t.isPrimitive(null), true);
assert.equal(t.isPrimitive(''), true);
assert.equal(t.isPrimitive(0), true);
assert.equal(t.isPrimitive(new Date()), false);
assert.equal(t.isBuffer(null), false);
assert.equal(t.isBuffer({}), false);
assert.equal(t.isBuffer(new Buffer(0)), true);
The MIT License (MIT)
Copyright (c) 2014 Mathias Buus
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
var once = require('once');
var noop = function() {};
var isRequest = function(stream) {
return stream.setHeader && typeof stream.abort === 'function';
};
var isChildProcess = function(stream) {
return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3
};
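// eos(stream, [opts], callback): invoke callback exactly once when the stream
// has ended, finished, errored, or closed, and return a function that removes
// every listener attached below.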
var eos = function(stream, opts, callback) {
if (typeof opts === 'function') return eos(stream, null, opts);
if (!opts) opts = {};
callback = once(callback || noop);
var ws = stream._writableState;
var rs = stream._readableState;
var readable = opts.readable || (opts.readable !== false && stream.readable);
var writable = opts.writable || (opts.writable !== false && stream.writable);
var cancelled = false;
var onlegacyfinish = function() {
if (!stream.writable) onfinish();
};
var onfinish = function() {
writable = false;
if (!readable) callback.call(stream);
};
var onend = function() {
readable = false;
if (!writable) callback.call(stream);
};
var onexit = function(exitCode) {
callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
};
var onerror = function(err) {
callback.call(stream, err);
};
var onclose = function() {
process.nextTick(onclosenexttick);
};
var onclosenexttick = function() {
if (cancelled) return;
if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));
if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));
};
var onrequest = function() {
stream.req.on('finish', onfinish);
};
if (isRequest(stream)) {
stream.on('complete', onfinish);
stream.on('abort', onclose);
if (stream.req) onrequest();
else stream.on('request', onrequest);
} else if (writable && !ws) { // legacy streams
stream.on('end', onlegacyfinish);
stream.on('close', onlegacyfinish);
}
if (isChildProcess(stream)) stream.on('exit', onexit);
stream.on('end', onend);
stream.on('finish', onfinish);
if (opts.error !== false) stream.on('error', onerror);
stream.on('close', onclose);
return function() {
cancelled = true;
stream.removeListener('complete', onfinish);
stream.removeListener('abort', onclose);
stream.removeListener('request', onrequest);
if (stream.req) stream.req.removeListener('finish', onfinish);
stream.removeListener('end', onlegacyfinish);
stream.removeListener('close', onlegacyfinish);
stream.removeListener('finish', onfinish);
stream.removeListener('exit', onexit);
stream.removeListener('end', onend);
stream.removeListener('error', onerror);
stream.removeListener('close', onclose);
};
};
module.exports = eos;
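// Usage sketch (illustrative, not part of the module above): assumes the
// package is installed under its npm name 'end-of-stream' and that
// 'example.txt' exists; both names are assumptions made only for this example.
var eos = require('end-of-stream');
var fs = require('fs');

var readable = fs.createReadStream('example.txt');
var cancel = eos(readable, function(err) {
  if (err) return console.error('stream errored or closed prematurely', err);
  console.log('stream ended cleanly');
});
// The function returned by eos() detaches all listeners it added:
// cancel();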
language: node_js
node_js:
- "0.10"
script:
- "npm run test-travis"
after_script:
- "npm install coveralls@2 && cat ./coverage/lcov.info | ./node_modules/.bin/coveralls"
### 1.0.1
* use `setImmediate` instead of `nextTick`
### 1.0.0
* `new FdSlicer(fd, options)` must now be `fdSlicer.createFromFd(fd, options)`
* fix behavior when `end` is 0.
* fix `createWriteStream` when using `createFromBuffer`
### 0.4.0
* add ability to create an FdSlicer instance from a Buffer
### 0.3.2
* fix write stream and read stream destroy behavior
### 0.3.1
* write stream: fix end option behavior
### 0.3.0
* write stream emits 'progress' events
* write stream supports 'end' option which causes the stream to emit an error
if a maximum size is exceeded
* improve documentation
### 0.2.1
* Update pend dependency to latest bugfix version.
### 0.2.0
* Add read and write functions
### 0.1.0
* Add `autoClose` option and `ref()` and `unref()`.
### 0.0.2
* Add API documentation
* read stream: create buffer at last possible moment
### 0.0.1
* Initial release
Copyright (c) 2014 Andrew Kelley
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation files
(the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
var fs = require('fs');
var util = require('util');
var stream = require('stream');
var Readable = stream.Readable;
var Writable = stream.Writable;
var PassThrough = stream.PassThrough;
var Pend = require('pend');
var EventEmitter = require('events').EventEmitter;
exports.createFromBuffer = createFromBuffer;
exports.createFromFd = createFromFd;
exports.BufferSlicer = BufferSlicer;
exports.FdSlicer = FdSlicer;
util.inherits(FdSlicer, EventEmitter);
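// FdSlicer wraps an open file descriptor and hands out independent read/write
// streams over byte ranges of it; a Pend with max = 1 serializes the
// underlying fs.read/fs.write calls.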
function FdSlicer(fd, options) {
options = options || {};
EventEmitter.call(this);
this.fd = fd;
this.pend = new Pend();
this.pend.max = 1;
this.refCount = 0;
this.autoClose = !!options.autoClose;
}
FdSlicer.prototype.read = function(buffer, offset, length, position, callback) {
var self = this;
self.pend.go(function(cb) {
fs.read(self.fd, buffer, offset, length, position, function(err, bytesRead, buffer) {
cb();
callback(err, bytesRead, buffer);
});
});
};
FdSlicer.prototype.write = function(buffer, offset, length, position, callback) {
var self = this;
self.pend.go(function(cb) {
fs.write(self.fd, buffer, offset, length, position, function(err, written, buffer) {
cb();
callback(err, written, buffer);
});
});
};
FdSlicer.prototype.createReadStream = function(options) {
return new ReadStream(this, options);
};
FdSlicer.prototype.createWriteStream = function(options) {
return new WriteStream(this, options);
};
FdSlicer.prototype.ref = function() {
this.refCount += 1;
};
FdSlicer.prototype.unref = function() {
var self = this;
self.refCount -= 1;
if (self.refCount > 0) return;
if (self.refCount < 0) throw new Error("invalid unref");
if (self.autoClose) {
fs.close(self.fd, onCloseDone);
}
function onCloseDone(err) {
if (err) {
self.emit('error', err);
} else {
self.emit('close');
}
}
};
util.inherits(ReadStream, Readable);
function ReadStream(context, options) {
options = options || {};
Readable.call(this, options);
this.context = context;
this.context.ref();
this.start = options.start || 0;
this.endOffset = options.end;
this.pos = this.start;
this.destroyed = false;
}
ReadStream.prototype._read = function(n) {
var self = this;
if (self.destroyed) return;
var toRead = Math.min(self._readableState.highWaterMark, n);
if (self.endOffset != null) {
toRead = Math.min(toRead, self.endOffset - self.pos);
}
if (toRead <= 0) {
self.destroyed = true;
self.push(null);
self.context.unref();
return;
}
self.context.pend.go(function(cb) {
if (self.destroyed) return cb();
var buffer = new Buffer(toRead);
fs.read(self.context.fd, buffer, 0, toRead, self.pos, function(err, bytesRead) {
if (err) {
self.destroy(err);
} else if (bytesRead === 0) {
self.destroyed = true;
self.push(null);
self.context.unref();
} else {
self.pos += bytesRead;
self.push(buffer.slice(0, bytesRead));
}
cb();
});
});
};
ReadStream.prototype.destroy = function(err) {
if (this.destroyed) return;
err = err || new Error("stream destroyed");
this.destroyed = true;
this.emit('error', err);
this.context.unref();
};
util.inherits(WriteStream, Writable);
function WriteStream(context, options) {
options = options || {};
Writable.call(this, options);
this.context = context;
this.context.ref();
this.start = options.start || 0;
this.endOffset = (options.end == null) ? Infinity : +options.end;
this.bytesWritten = 0;
this.pos = this.start;
this.destroyed = false;
this.on('finish', this.destroy.bind(this));
}
WriteStream.prototype._write = function(buffer, encoding, callback) {
var self = this;
if (self.destroyed) return;
if (self.pos + buffer.length > self.endOffset) {
var err = new Error("maximum file length exceeded");
err.code = 'ETOOBIG';
self.destroy();
callback(err);
return;
}
self.context.pend.go(function(cb) {
if (self.destroyed) return cb();
fs.write(self.context.fd, buffer, 0, buffer.length, self.pos, function(err, bytes) {
if (err) {
self.destroy();
cb();
callback(err);
} else {
self.bytesWritten += bytes;
self.pos += bytes;
self.emit('progress');
cb();
callback();
}
});
});
};
WriteStream.prototype.destroy = function() {
if (this.destroyed) return;
this.destroyed = true;
this.context.unref();
};
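// BufferSlicer mirrors the FdSlicer API but operates on an in-memory Buffer,
// so reads and writes complete asynchronously via setImmediate instead of fs.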
util.inherits(BufferSlicer, EventEmitter);
function BufferSlicer(buffer, options) {
EventEmitter.call(this);
options = options || {};
this.refCount = 0;
this.buffer = buffer;
this.maxChunkSize = options.maxChunkSize || Number.MAX_SAFE_INTEGER;
}
BufferSlicer.prototype.read = function(buffer, offset, length, position, callback) {
var end = position + length;
var delta = end - this.buffer.length;
var written = (delta > 0) ? delta : length;
this.buffer.copy(buffer, offset, position, end);
setImmediate(function() {
callback(null, written);
});
};
BufferSlicer.prototype.write = function(buffer, offset, length, position, callback) {
buffer.copy(this.buffer, position, offset, offset + length);
setImmediate(function() {
callback(null, length, buffer);
});
};
BufferSlicer.prototype.createReadStream = function(options) {
options = options || {};
var readStream = new PassThrough(options);
readStream.destroyed = false;
readStream.start = options.start || 0;
readStream.endOffset = options.end;
// by the time this function returns, we'll be done.
readStream.pos = readStream.endOffset || this.buffer.length;
// respect the maxChunkSize option to slice up the chunk into smaller pieces.
var entireSlice = this.buffer.slice(readStream.start, readStream.pos);
var offset = 0;
while (true) {
var nextOffset = offset + this.maxChunkSize;
if (nextOffset >= entireSlice.length) {
// last chunk
if (offset < entireSlice.length) {
readStream.write(entireSlice.slice(offset, entireSlice.length));
}
break;
}
readStream.write(entireSlice.slice(offset, nextOffset));
offset = nextOffset;
}
readStream.end();
readStream.destroy = function() {
readStream.destroyed = true;
};
return readStream;
};
BufferSlicer.prototype.createWriteStream = function(options) {
var bufferSlicer = this;
options = options || {};
var writeStream = new Writable(options);
writeStream.start = options.start || 0;
writeStream.endOffset = (options.end == null) ? this.buffer.length : +options.end;
writeStream.bytesWritten = 0;
writeStream.pos = writeStream.start;
writeStream.destroyed = false;
writeStream._write = function(buffer, encoding, callback) {
if (writeStream.destroyed) return;
var end = writeStream.pos + buffer.length;
if (end > writeStream.endOffset) {
var err = new Error("maximum file length exceeded");
err.code = 'ETOOBIG';
writeStream.destroyed = true;
callback(err);
return;
}
buffer.copy(bufferSlicer.buffer, writeStream.pos, 0, buffer.length);
writeStream.bytesWritten += buffer.length;
writeStream.pos = end;
writeStream.emit('progress');
callback();
};
writeStream.destroy = function() {
writeStream.destroyed = true;
};
return writeStream;
};
BufferSlicer.prototype.ref = function() {
this.refCount += 1;
};
BufferSlicer.prototype.unref = function() {
this.refCount -= 1;
if (this.refCount < 0) {
throw new Error("invalid unref");
}
};
function createFromBuffer(buffer, options) {
return new BufferSlicer(buffer, options);
}
function createFromFd(fd, options) {
return new FdSlicer(fd, options);
}
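// Usage sketch (illustrative, not part of the module above): assumes the
// package is installed under its npm name 'fd-slicer' and that 'example.bin'
// exists; both names are assumptions for this example only.
var fs = require('fs');
var fdSlicer = require('fd-slicer');

fs.open('example.bin', 'r', function(err, fd) {
  if (err) throw err;
  var slicer = fdSlicer.createFromFd(fd, { autoClose: true });
  // Two independent read streams over different byte ranges of the same fd;
  // Pend serializes the underlying fs.read calls so they never interleave.
  var head = slicer.createReadStream({ start: 0, end: 16 });
  var tail = slicer.createReadStream({ start: 16 });
  head.pipe(process.stdout);
  tail.resume();
  // With autoClose: true the fd is closed once every stream has unref'd it.
});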
language: node_js
node_js:
- "0.10"
# FlushWritable
A Writable stream that flushes before emitting finish.
## ChangeLog
### v1.0.0
- **Initial Public Release**
The MIT License (MIT)
Copyright (c) 2014 Tom Frost
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
/*
* FlushWritable
* Copyright 2014 Tom Frost
*/
var EventEmitter = require('events').EventEmitter,
Writable = require('stream').Writable,
util = require('util');
/**
* FlushWritable is a drop-in replacement for stream.Writable that implements
* the Transform stream's _flush() method. FlushWritable is meant to be
* extended, just like stream.Writable. However, in the child class's
* prototype, a method called _flush(cb) can be defined that will halt the
* firing of the 'finish' event until the callback is called. If the callback
* is called with a truthy first argument, 'error' is emitted instead.
* @param {Object} [opts] Options to configure this Writable stream. See the
* Node.js docs for stream.Writable.
* @constructor
*/
function FlushWritable(opts) {
Writable.call(this, opts);
}
util.inherits(FlushWritable, Writable);
FlushWritable.prototype.emit = function(evt) {
if (evt === 'finish' && this._flush && !Writable.prototype._flush) {
this._flush(function(err) {
if (err)
EventEmitter.prototype.emit.call(this, 'error', err);
else
EventEmitter.prototype.emit.call(this, 'finish');
}.bind(this));
}
else {
var args = Array.prototype.slice.call(arguments);
EventEmitter.prototype.emit.apply(this, args);
}
};
module.exports = FlushWritable;
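// Usage sketch (illustrative, not part of the module above): shows the
// extension pattern described in the doc comment. The require path
// 'flushwritable' and the async work simulated with setTimeout are
// assumptions made only for this example.
var FlushWritable = require('flushwritable');
var util = require('util');

function UploadStream(opts) {
  FlushWritable.call(this, opts);
  this.chunks = [];
}
util.inherits(UploadStream, FlushWritable);

UploadStream.prototype._write = function(chunk, encoding, cb) {
  this.chunks.push(chunk); // buffer writes until the stream is ended
  cb();
};

UploadStream.prototype._flush = function(cb) {
  // 'finish' is held back until this callback fires; pass an error to have
  // 'error' emitted instead.
  setTimeout(cb, 10);
};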
/*
* FlushWritable
* Copyright 2014 Tom Frost
*/
var FlushWritable = require('../lib/FlushWritable'),
Readable = require('stream').Readable,
util = require('util'),
should = require('should');
function TestWritable() {
FlushWritable.call(this);
}
util.inherits(TestWritable, FlushWritable);
TestWritable.prototype._flush = function(cb) {
this.flushCalled = true;
setTimeout(function() {
cb(this.err);
}.bind(this), 10);
};
TestWritable.prototype._write = function(data, encoding, cb) {
cb();
};
function TestReadable() {
this.i = 0;
Readable.call(this);
}
util.inherits(TestReadable, Readable);
TestReadable.prototype._read = function() {
if (this.i++ === 2)
this.push(null);
else
this.push('foo');
};
describe('FlushWritable', function() {
it('should call _flush prior to emitting finish', function(done) {
var r = new TestReadable(),
w = new TestWritable(),
finished = false;
w.on('finish', function() {
finished = true;
});
r.pipe(w);
setTimeout(function() {
w.should.have.property('flushCalled');
finished.should.eql(false);
setTimeout(function() {
finished.should.eql(true);
done();
}, 10);
}, 5);
});
it('should emit error instead of finish for errors in cb', function(done) {
var r = new TestReadable(),
w = new TestWritable(),
finished = false,
errored = false;
w.on('finish', function() {
finished = true;
});
w.on('error', function() {
errored = true;
});
w.err = new Error('bar');
r.pipe(w);
setTimeout(function() {
finished.should.eql(false);
errored.should.eql(true);
done();
}, 15);
});
it('should finish immediately if no _flush is defined', function(done) {
var r = new TestReadable(),
w = new TestWritable(),
finished = false;
w.on('finish', function() {
finished = true;
});
w._flush = undefined;
r.pipe(w);
setTimeout(function() {
finished.should.eql(true);
done();
}, 5);
});
});
The MIT License (MIT)
Copyright (c) 2018 Mathias Buus
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
module.exports = require('constants')
module.exports = require('fs').constants || require('constants')
1.0.0 / 2015-09-29
==================
* chore: use eslint and es6
* test: add test with co
* travis: test on node(1,2,3,4)
* feat: support promise
* fork from supershabam/ready
The MIT License (MIT)
Copyright (c) 2013 Ian Matthew Hansen
Copyright (c) 2015 node-modules and other contributors.
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'use strict';
function ready(flagOrFunction) {
this._ready = !!this._ready;
this._readyCallbacks = this._readyCallbacks || [];
if (arguments.length === 0) {
// return a promise
// support `this.ready().then(onready);` and `yield this.ready()`;
return new Promise(function (resolve) {
if (this._ready) {
return resolve();
}
this._readyCallbacks.push(resolve);
}.bind(this));
} else if (typeof flagOrFunction === 'function') {
this._readyCallbacks.push(flagOrFunction);
} else {
this._ready = !!flagOrFunction;
}
if (this._ready) {
this._readyCallbacks.splice(0, Infinity).forEach(function(callback) {
process.nextTick(callback);
});
}
}
function mixin(object) {
object.ready = ready;
}
module.exports = mixin;
module.exports.mixin = mixin;
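// Usage sketch (illustrative, not part of the module above): assumes the file
// above is saved locally as './ready.js'.
var ready = require('./ready');

function Server() {}
ready.mixin(Server.prototype);

var server = new Server();

// Callback style: queued until the ready flag is set.
server.ready(function() { console.log('callback: ready'); });

// Promise style: resolves once the flag is set (or immediately if already set).
server.ready().then(function() { console.log('promise: ready'); });

// Setting the flag flushes queued callbacks on the next tick.
server.ready(true);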
## 0.5.2 / 2020-06-08
* Added `iconv.getEncoder()` and `iconv.getDecoder()` methods to typescript definitions (#229).
* Fixed semver version to 6.1.2 to support Node 8.x (by @tanandara).
* Capped iconv version to 2.x as 3.x has dropped support for older Node versions.
* Switched from istanbul to c8 for code coverage.
## 0.5.1 / 2020-01-18
* Added cp720 encoding (#221, by @kr-deps)
* (minor) Changed Changelog.md formatting to use h2.
## 0.5.0 / 2019-06-26
* Added UTF-32 encoding, both little-endian and big-endian variants (UTF-32LE, UTF-32BE). If endianness
is not provided for decoding, it's deduced automatically from the stream using a heuristic similar to
what we use in UTF-16. (great work in #216 by @kshetline)
* Several minor updates to README (#217 by @oldj, plus some more)
* Added Node versions 10 and 12 to Travis test harness.
## 0.4.24 / 2018-08-22
* Added MIK encoding (#196, by @Ivan-Kalatchev)
## 0.4.23 / 2018-05-07
* Fix deprecation warning in Node v10 due to the last usage of `new Buffer` (#185, by @felixbuenemann)
* Switched from NodeBuffer to Buffer in typings (#155 by @felixfbecker, #186 by @larssn)
## 0.4.22 / 2018-05-05
* Use older semver style for dependencies to be compatible with Node version 0.10 (#182, by @dougwilson)
* Fix tests to accommodate fixes in Node v10 (#182, by @dougwilson)
## 0.4.21 / 2018-04-06
* Fix encoding canonicalization (#156)
* Fix the paths in the "browser" field in package.json (#174 by @LMLB)
* Removed "contributors" section in package.json - see Git history instead.
## 0.4.20 / 2018-04-06
* Updated `new Buffer()` usages with recommended replacements as it's being deprecated in Node v10 (#176, #178 by @ChALkeR)
## 0.4.19 / 2017-09-09
* Fixed iso8859-1 codec regression in handling untranslatable characters (#162, caused by #147)
* Re-generated windows1255 codec, because it was updated in iconv project
* Fixed grammar in error message when iconv-lite is loaded with encoding other than utf8
## 0.4.18 / 2017-06-13
* Fixed CESU-8 regression in Node v8.
## 0.4.17 / 2017-04-22
* Updated typescript definition file to support Angular 2 AoT mode (#153 by @larssn)
## 0.4.16 / 2017-04-22
* Added support for React Native (#150)
* Changed iso8859-1 encoding to use the internal 'binary' encoding, as it's the same thing (#147 by @mscdex)
* Fixed typo in Readme (#138 by @jiangzhuo)
* Fixed build for Node v6.10+ by making correct version comparison
* Added a warning when iconv-lite is loaded with an encoding other than utf-8 (see #142)
## 0.4.15 / 2016-11-21
* Fixed typescript type definition (#137)
## 0.4.14 / 2016-11-20
* Preparation for v1.0
* Added Node v6 and latest Node versions to Travis CI test rig
* Deprecated Node v0.8 support
* Typescript typings (@larssn)
* Fix encoding of Euro character in GB 18030 (inspired by @lygstate)
* Add ms prefix to dbcs windows encodings (@rokoroku)
## 0.4.13 / 2015-10-01
* Fix silly mistake in deprecation notice.
## 0.4.12 / 2015-09-26
* Node v4 support:
* Added CESU-8 decoding (#106)
* Added deprecation notice for `extendNodeEncodings`
* Added Travis tests for Node v4 and io.js latest (#105 by @Mithgol)
## 0.4.11 / 2015-07-03
* Added CESU-8 encoding.
## 0.4.10 / 2015-05-26
* Changed UTF-16 endianness heuristic to take into account any ASCII chars, not
just spaces. This should minimize the importance of "default" endianness.
## 0.4.9 / 2015-05-24
* Streamlined BOM handling: strip BOM by default, add BOM when encoding if
addBOM: true. Added docs to Readme.
* UTF16 now uses UTF16-LE by default.
* Fixed minor issue with big5 encoding.
* Added io.js testing on Travis; updated node-iconv version to test against.
Now we just skip testing SBCS encodings that node-iconv doesn't support.
* (internal refactoring) Updated codec interface to use classes.
* Use strict mode in all files.
## 0.4.8 / 2015-04-14
* added alias UNICODE-1-1-UTF-7 for UTF-7 encoding (#94)
## 0.4.7 / 2015-02-05
* stop official support of Node.js v0.8. Should still work, but no guarantees.
reason: Packages needed for testing are hard to get on Travis CI.
* work in environment where Object.prototype is monkey patched with enumerable
props (#89).
## 0.4.6 / 2015-01-12
* fix rare aliases of single-byte encodings (thanks @mscdex)
* double the timeout for dbcs tests to make them less flaky on travis
## 0.4.5 / 2014-11-20
* fix windows-31j and x-sjis encoding support (@nleush)
* minor fix: undefined variable reference when internal error happens
## 0.4.4 / 2014-07-16
* added encodings UTF-7 (RFC2152) and UTF-7-IMAP (RFC3501 Section 5.1.3)
* fixed streaming base64 encoding
## 0.4.3 / 2014-06-14
* added encodings UTF-16BE and UTF-16 with BOM
## 0.4.2 / 2014-06-12
* don't throw exception if `extendNodeEncodings()` is called more than once
## 0.4.1 / 2014-06-11
* codepage 808 added
## 0.4.0 / 2014-06-10
* code is rewritten from scratch
* all widespread encodings are supported
* streaming interface added
* browserify compatibility added
* (optional) extend core primitive encodings to make usage even simpler
* moved from vows to mocha as the testing framework
Copyright (c) 2011 Alexander Shtuchkin
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"use strict";
// Description of supported double byte encodings and aliases.
// Tables are not require()-d until they are needed to speed up library load.
// require()-s are direct to support Browserify.
module.exports = {
// == Japanese/ShiftJIS ====================================================
// All japanese encodings are based on JIS X set of standards:
// JIS X 0201 - Single-byte encoding of ASCII + ¥ + Kana chars at 0xA1-0xDF.
// JIS X 0208 - Main set of 6879 characters, placed in 94x94 plane, to be encoded by 2 bytes.
// Has several variations in 1978, 1983, 1990 and 1997.
// JIS X 0212 - Supplementary plane of 6067 chars in 94x94 plane. 1990. Effectively dead.
// JIS X 0213 - Extension and modern replacement of 0208 and 0212. Total chars: 11233.
// 2 planes, first is superset of 0208, second - revised 0212.
// Introduced in 2000, revised 2004. Some characters are in Unicode Plane 2 (0x2xxxx)
// Byte encodings are:
// * Shift_JIS: Compatible with 0201, uses otherwise-undefined chars in the top half as lead bytes for double-byte
// encoding of 0208. Lead byte ranges: 0x81-0x9F, 0xE0-0xEF; Trail byte ranges: 0x40-0x7E, 0x80-0x9E, 0x9F-0xFC.
// Windows CP932 is a superset of Shift_JIS. Some companies added more chars, notably KDDI.
// * EUC-JP: Up to 3 bytes per character. Used mostly on *nixes.
// 0x00-0x7F - lower part of 0201
// 0x8E, 0xA1-0xDF - upper part of 0201
// (0xA1-0xFE)x2 - 0208 plane (94x94).
// 0x8F, (0xA1-0xFE)x2 - 0212 plane (94x94).
// * JIS X 208: 7-bit, direct encoding of 0208. Byte ranges: 0x21-0x7E (94 values). Uncommon.
// Used as-is in ISO2022 family.
// * ISO2022-JP: Stateful encoding, with escape sequences to switch between ASCII,
// 0201-1976 Roman, 0208-1978, 0208-1983.
// * ISO2022-JP-1: Adds esc seq for 0212-1990.
// * ISO2022-JP-2: Adds esc seq for GB2313-1980, KSX1001-1992, ISO8859-1, ISO8859-7.
// * ISO2022-JP-3: Adds esc seq for 0201-1976 Kana set, 0213-2000 Planes 1, 2.
// * ISO2022-JP-2004: Adds 0213-2004 Plane 1.
//
// After JIS X 0213 appeared, Shift_JIS-2004, EUC-JISX0213 and ISO2022-JP-2004 followed, with just changing the planes.
//
// Overall, it seems that it's a mess :( http://www8.plala.or.jp/tkubota1/unicode-symbols-map2.html
'shiftjis': {
type: '_dbcs',
table: function() { return require('./tables/shiftjis.json') },
encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E},
encodeSkipVals: [{from: 0xED40, to: 0xF940}],
},
'csshiftjis': 'shiftjis',
'mskanji': 'shiftjis',
'sjis': 'shiftjis',
'windows31j': 'shiftjis',
'ms31j': 'shiftjis',
'xsjis': 'shiftjis',
'windows932': 'shiftjis',
'ms932': 'shiftjis',
'932': 'shiftjis',
'cp932': 'shiftjis',
'eucjp': {
type: '_dbcs',
table: function() { return require('./tables/eucjp.json') },
encodeAdd: {'\u00a5': 0x5C, '\u203E': 0x7E},
},
// TODO: KDDI extension to Shift_JIS
// TODO: IBM CCSID 942 = CP932, but F0-F9 custom chars and other char changes.
// TODO: IBM CCSID 943 = Shift_JIS = CP932 with original Shift_JIS lower 128 chars.
// == Chinese/GBK ==========================================================
// http://en.wikipedia.org/wiki/GBK
// We mostly implement W3C recommendation: https://www.w3.org/TR/encoding/#gbk-encoder
// Oldest GB2312 (1981, ~7600 chars) is a subset of CP936
'gb2312': 'cp936',
'gb231280': 'cp936',
'gb23121980': 'cp936',
'csgb2312': 'cp936',
'csiso58gb231280': 'cp936',
'euccn': 'cp936',
// Microsoft's CP936 is a subset and approximation of GBK.
'windows936': 'cp936',
'ms936': 'cp936',
'936': 'cp936',
'cp936': {
type: '_dbcs',
table: function() { return require('./tables/cp936.json') },
},
// GBK (~22000 chars) is an extension of CP936 that added user-mapped chars and some other.
'gbk': {
type: '_dbcs',
table: function() { return require('./tables/cp936.json').concat(require('./tables/gbk-added.json')) },
},
'xgbk': 'gbk',
'isoir58': 'gbk',
// GB18030 is an algorithmic extension of GBK.
// Main source: https://www.w3.org/TR/encoding/#gbk-encoder
// http://icu-project.org/docs/papers/gb18030.html
// http://source.icu-project.org/repos/icu/data/trunk/charset/data/xml/gb-18030-2000.xml
// http://www.khngai.com/chinese/charmap/tblgbk.php?page=0
'gb18030': {
type: '_dbcs',
table: function() { return require('./tables/cp936.json').concat(require('./tables/gbk-added.json')) },
gb18030: function() { return require('./tables/gb18030-ranges.json') },
encodeSkipVals: [0x80],
encodeAdd: {'€': 0xA2E3},
},
'chinese': 'gb18030',
// == Korean ===============================================================
// EUC-KR, KS_C_5601 and KS X 1001 are exactly the same.
'windows949': 'cp949',
'ms949': 'cp949',
'949': 'cp949',
'cp949': {
type: '_dbcs',
table: function() { return require('./tables/cp949.json') },
},
'cseuckr': 'cp949',
'csksc56011987': 'cp949',
'euckr': 'cp949',
'isoir149': 'cp949',
'korean': 'cp949',
'ksc56011987': 'cp949',
'ksc56011989': 'cp949',
'ksc5601': 'cp949',
// == Big5/Taiwan/Hong Kong ================================================
// There are lots of tables for Big5 and cp950. Please see the following links for history:
// http://moztw.org/docs/big5/ http://www.haible.de/bruno/charsets/conversion-tables/Big5.html
// Variations, in roughly number of defined chars:
// * Windows CP 950: Microsoft variant of Big5. Canonical: http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP950.TXT
// * Windows CP 951: Microsoft variant of Big5-HKSCS-2001. Seems to be never public. http://me.abelcheung.org/articles/research/what-is-cp951/
// * Big5-2003 (Taiwan standard) almost superset of cp950.
// * Unicode-at-on (UAO) / Mozilla 1.8. Falling out of use on the Web. Not supported by other browsers.
// * Big5-HKSCS (-2001, -2004, -2008). Hong Kong standard.
// many unicode code points moved from PUA to Supplementary plane (U+2XXXX) over the years.
// Plus, it has 4 combining sequences.
// Seems that Mozilla refused to support it for 10 yrs. https://bugzilla.mozilla.org/show_bug.cgi?id=162431 https://bugzilla.mozilla.org/show_bug.cgi?id=310299
// because big5-hkscs is the only encoding to include astral characters in non-algorithmic way.
// Implementations are not consistent within browsers; sometimes labeled as just big5.
// MS Internet Explorer switches from big5 to big5-hkscs when a patch applied.
// Great discussion & recap of what's going on https://bugzilla.mozilla.org/show_bug.cgi?id=912470#c31
// In the encoder, it might make sense to support encoding old PUA mappings to Big5 bytes seq-s.
// Official spec: http://www.ogcio.gov.hk/en/business/tech_promotion/ccli/terms/doc/2003cmp_2008.txt
// http://www.ogcio.gov.hk/tc/business/tech_promotion/ccli/terms/doc/hkscs-2008-big5-iso.txt
//
// Current understanding of how to deal with Big5(-HKSCS) is in the Encoding Standard, http://encoding.spec.whatwg.org/#big5-encoder
// Unicode mapping (http://www.unicode.org/Public/MAPPINGS/OBSOLETE/EASTASIA/OTHER/BIG5.TXT) is said to be wrong.
'windows950': 'cp950',
'ms950': 'cp950',
'950': 'cp950',
'cp950': {
type: '_dbcs',
table: function() { return require('./tables/cp950.json') },
},
// Big5 has many variations and is an extension of cp950. We use Encoding Standard's as a consensus.
'big5': 'big5hkscs',
'big5hkscs': {
type: '_dbcs',
table: function() { return require('./tables/cp950.json').concat(require('./tables/big5-added.json')) },
encodeSkipVals: [0xa2cc],
},
'cnbig5': 'big5hkscs',
'csbig5': 'big5hkscs',
'xxbig5': 'big5hkscs',
};
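// Usage sketch (illustrative, not part of the table above): how these DBCS
// definitions are reached through the public iconv-lite API; assumes the
// package is installed as 'iconv-lite'.
var iconv = require('iconv-lite');

// 'gbk' resolves through the aliases above to the cp936 + gbk-added tables.
var bytes = iconv.encode('中文', 'gbk');   // Buffer of GBK double-byte codes
var text = iconv.decode(bytes, 'gbk');     // back to a JavaScript string
console.log(bytes, text);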
"use strict";
// Update this array if you add/rename/remove files in this directory.
// We support Browserify by skipping automatic module discovery and requiring modules directly.
var modules = [
require("./internal"),
require("./utf32"),
require("./utf16"),
require("./utf7"),
require("./sbcs-codec"),
require("./sbcs-data"),
require("./sbcs-data-generated"),
require("./dbcs-codec"),
require("./dbcs-data"),
];
// Put all encoding/alias/codec definitions to single object and export it.
for (var i = 0; i < modules.length; i++) {
var module = modules[i];
for (var enc in module)
if (Object.prototype.hasOwnProperty.call(module, enc))
exports[enc] = module[enc];
}
{"uChars":[128,165,169,178,184,216,226,235,238,244,248,251,253,258,276,284,300,325,329,334,364,463,465,467,469,471,473,475,477,506,594,610,712,716,730,930,938,962,970,1026,1104,1106,8209,8215,8218,8222,8231,8241,8244,8246,8252,8365,8452,8454,8458,8471,8482,8556,8570,8596,8602,8713,8720,8722,8726,8731,8737,8740,8742,8748,8751,8760,8766,8777,8781,8787,8802,8808,8816,8854,8858,8870,8896,8979,9322,9372,9548,9588,9616,9622,9634,9652,9662,9672,9676,9680,9702,9735,9738,9793,9795,11906,11909,11913,11917,11928,11944,11947,11951,11956,11960,11964,11979,12284,12292,12312,12319,12330,12351,12436,12447,12535,12543,12586,12842,12850,12964,13200,13215,13218,13253,13263,13267,13270,13384,13428,13727,13839,13851,14617,14703,14801,14816,14964,15183,15471,15585,16471,16736,17208,17325,17330,17374,17623,17997,18018,18212,18218,18301,18318,18760,18811,18814,18820,18823,18844,18848,18872,19576,19620,19738,19887,40870,59244,59336,59367,59413,59417,59423,59431,59437,59443,59452,59460,59478,59493,63789,63866,63894,63976,63986,64016,64018,64021,64025,64034,64037,64042,65074,65093,65107,65112,65127,65132,65375,65510,65536],"gbChars":[0,36,38,45,50,81,89,95,96,100,103,104,105,109,126,133,148,172,175,179,208,306,307,308,309,310,311,312,313,341,428,443,544,545,558,741,742,749,750,805,819,820,7922,7924,7925,7927,7934,7943,7944,7945,7950,8062,8148,8149,8152,8164,8174,8236,8240,8262,8264,8374,8380,8381,8384,8388,8390,8392,8393,8394,8396,8401,8406,8416,8419,8424,8437,8439,8445,8482,8485,8496,8521,8603,8936,8946,9046,9050,9063,9066,9076,9092,9100,9108,9111,9113,9131,9162,9164,9218,9219,11329,11331,11334,11336,11346,11361,11363,11366,11370,11372,11375,11389,11682,11686,11687,11692,11694,11714,11716,11723,11725,11730,11736,11982,11989,12102,12336,12348,12350,12384,12393,12395,12397,12510,12553,12851,12962,12973,13738,13823,13919,13933,14080,14298,14585,14698,15583,15847,16318,16434,16438,16481,16729,17102,17122,17315,17320,17402,17418,17859,17909,17911,17915,17916,17936,17939,17961,18664,18703,18814,18962,19043,33469,33470,33471,33484,33485,33490,33497,33501,33505,33513,33520,33536,33550,37845,37921,37948,38029,38038,38064,38065,38066,38069,38075,38076,38078,39108,39109,39113,39114,39115,39116,39265,39394,189000]}
The ISC License
Copyright (c) Isaac Z. Schlueter
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
PERFORMANCE OF THIS SOFTWARE.
try {
var util = require('util');
/* istanbul ignore next */
if (typeof util.inherits !== 'function') throw '';
module.exports = util.inherits;
} catch (e) {
/* istanbul ignore next */
module.exports = require('./inherits_browser.js');
}
language: node_js
node_js:
- "0.8"
- "0.10"
test:
@node_modules/.bin/tape test.js
.PHONY: test