Commit b37e3492 authored by 范雪寒

chore:

parent 3ed799bd
{
"ver": "1.1.2",
"uuid": "810c015d-857f-44be-aa35-fcda8b54edb3",
"isBundle": false,
"bundleName": "",
"priority": 1,
"compressionType": {},
"optimizeHotUpdate": {},
"inlineSpriteFrames": {},
"isRemoteBundle": {
"ios": false,
"android": false
},
"subMetas": {}
}
{"ver":"1.1.2","uuid":"810c015d-857f-44be-aa35-fcda8b54edb3","isBundle":false,"bundleName":"","priority":1,"compressionType":{},"optimizeHotUpdate":{},"inlineSpriteFrames":{},"isRemoteBundle":{"ios":false,"android":false},"subMetas":{}}
\ No newline at end of file
../mkdirp/bin/cmd.js
{
"predef": [ ]
, "bitwise": false
, "camelcase": false
, "curly": false
, "eqeqeq": false
, "forin": false
, "immed": false
, "latedef": false
, "noarg": true
, "noempty": true
, "nonew": true
, "plusplus": false
, "quotmark": true
, "regexp": false
, "undef": true
, "unused": true
, "strict": false
, "trailing": true
, "maxlen": 120
, "asi": true
, "boss": true
, "debug": true
, "eqnull": true
, "esnext": true
, "evil": true
, "expr": true
, "funcscope": false
, "globalstrict": false
, "iterator": false
, "lastsemic": true
, "laxbreak": true
, "laxcomma": true
, "loopfunc": true
, "multistr": false
, "onecase": false
, "proto": false
, "regexdash": false
, "scripturl": true
, "smarttabs": false
, "shadow": false
, "sub": true
, "supernew": false
, "validthis": true
, "browser": true
, "couch": false
, "devel": false
, "dojo": false
, "mootools": false
, "node": true
, "nonstandard": true
, "prototypejs": false
, "rhino": false
, "worker": true
, "wsh": false
, "nomen": false
, "onevar": false
, "passfail": false
}
sudo: false
language: node_js
node_js:
- '6'
- '8'
- '10'
- '12'
- '14'
- lts/*
notifications:
email:
- rod@vagg.org
- matteo.collina@gmail.com
The MIT License (MIT)
=====================
Copyright (c) 2013-2016 bl contributors
----------------------------------
*bl contributors listed at <https://github.com/rvagg/bl#contributors>*
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
var DuplexStream = require('readable-stream/duplex')
, util = require('util')
, Buffer = require('safe-buffer').Buffer
function BufferList (callback) {
if (!(this instanceof BufferList))
return new BufferList(callback)
this._bufs = []
this.length = 0
if (typeof callback == 'function') {
this._callback = callback
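// piper forwards an error from a piped-in source to the pending callback (at most once)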
var piper = function piper (err) {
if (this._callback) {
this._callback(err)
this._callback = null
}
}.bind(this)
this.on('pipe', function onPipe (src) {
src.on('error', piper)
})
this.on('unpipe', function onUnpipe (src) {
src.removeListener('error', piper)
})
} else {
this.append(callback)
}
DuplexStream.call(this)
}
util.inherits(BufferList, DuplexStream)
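// map an absolute byte offset into [ index of the buffer in _bufs, offset within that buffer ]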
BufferList.prototype._offset = function _offset (offset) {
var tot = 0, i = 0, _t
if (offset === 0) return [ 0, 0 ]
for (; i < this._bufs.length; i++) {
_t = tot + this._bufs[i].length
if (offset < _t || i == this._bufs.length - 1)
return [ i, offset - tot ]
tot = _t
}
}
BufferList.prototype.append = function append (buf) {
var i = 0
if (Buffer.isBuffer(buf)) {
this._appendBuffer(buf);
} else if (Array.isArray(buf)) {
for (; i < buf.length; i++)
this.append(buf[i])
} else if (buf instanceof BufferList) {
// unwrap argument into individual BufferLists
for (; i < buf._bufs.length; i++)
this.append(buf._bufs[i])
} else if (buf != null) {
// coerce number arguments to strings, since Buffer(number) does
// uninitialized memory allocation
if (typeof buf == 'number')
buf = buf.toString()
this._appendBuffer(Buffer.from(buf));
}
return this
}
BufferList.prototype._appendBuffer = function appendBuffer (buf) {
this._bufs.push(buf)
this.length += buf.length
}
BufferList.prototype._write = function _write (buf, encoding, callback) {
this._appendBuffer(buf)
if (typeof callback == 'function')
callback()
}
BufferList.prototype._read = function _read (size) {
if (!this.length)
return this.push(null)
size = Math.min(size, this.length)
this.push(this.slice(0, size))
this.consume(size)
}
BufferList.prototype.end = function end (chunk) {
DuplexStream.prototype.end.call(this, chunk)
if (this._callback) {
this._callback(null, this.slice())
this._callback = null
}
}
BufferList.prototype.get = function get (index) {
return this.slice(index, index + 1)[0]
}
BufferList.prototype.slice = function slice (start, end) {
if (typeof start == 'number' && start < 0)
start += this.length
if (typeof end == 'number' && end < 0)
end += this.length
return this.copy(null, 0, start, end)
}
BufferList.prototype.copy = function copy (dst, dstStart, srcStart, srcEnd) {
if (typeof srcStart != 'number' || srcStart < 0)
srcStart = 0
if (typeof srcEnd != 'number' || srcEnd > this.length)
srcEnd = this.length
if (srcStart >= this.length)
return dst || Buffer.alloc(0)
if (srcEnd <= 0)
return dst || Buffer.alloc(0)
var copy = !!dst
, off = this._offset(srcStart)
, len = srcEnd - srcStart
, bytes = len
, bufoff = (copy && dstStart) || 0
, start = off[1]
, l
, i
// copy/slice everything
if (srcStart === 0 && srcEnd == this.length) {
if (!copy) { // slice, but full concat if multiple buffers
return this._bufs.length === 1
? this._bufs[0]
: Buffer.concat(this._bufs, this.length)
}
// copy, need to copy individual buffers
for (i = 0; i < this._bufs.length; i++) {
this._bufs[i].copy(dst, bufoff)
bufoff += this._bufs[i].length
}
return dst
}
// easy, cheap case where it's a subset of one of the buffers
if (bytes <= this._bufs[off[0]].length - start) {
return copy
? this._bufs[off[0]].copy(dst, dstStart, start, start + bytes)
: this._bufs[off[0]].slice(start, start + bytes)
}
if (!copy) // a slice, we need something to copy in to
dst = Buffer.allocUnsafe(len)
for (i = off[0]; i < this._bufs.length; i++) {
l = this._bufs[i].length - start
if (bytes > l) {
this._bufs[i].copy(dst, bufoff, start)
bufoff += l
} else {
this._bufs[i].copy(dst, bufoff, start, start + bytes)
bufoff += l
break
}
bytes -= l
if (start)
start = 0
}
// safeguard so that we don't return uninitialized memory
if (dst.length > bufoff) return dst.slice(0, bufoff)
return dst
}
BufferList.prototype.shallowSlice = function shallowSlice (start, end) {
start = start || 0
end = end || this.length
if (start < 0)
start += this.length
if (end < 0)
end += this.length
var startOffset = this._offset(start)
, endOffset = this._offset(end)
, buffers = this._bufs.slice(startOffset[0], endOffset[0] + 1)
if (endOffset[1] == 0)
buffers.pop()
else
buffers[buffers.length-1] = buffers[buffers.length-1].slice(0, endOffset[1])
if (startOffset[1] != 0)
buffers[0] = buffers[0].slice(startOffset[1])
return new BufferList(buffers)
}
BufferList.prototype.toString = function toString (encoding, start, end) {
return this.slice(start, end).toString(encoding)
}
BufferList.prototype.consume = function consume (bytes) {
// first, normalize the argument, in accordance with how Buffer does it
bytes = Math.trunc(bytes)
// do nothing if not a positive number
if (Number.isNaN(bytes) || bytes <= 0) return this
while (this._bufs.length) {
if (bytes >= this._bufs[0].length) {
bytes -= this._bufs[0].length
this.length -= this._bufs[0].length
this._bufs.shift()
} else {
this._bufs[0] = this._bufs[0].slice(bytes)
this.length -= bytes
break
}
}
return this
}
BufferList.prototype.duplicate = function duplicate () {
var i = 0
, copy = new BufferList()
for (; i < this._bufs.length; i++)
copy.append(this._bufs[i])
return copy
}
BufferList.prototype.destroy = function destroy () {
this._bufs.length = 0
this.length = 0
this.push(null)
}
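// generate the read*() convenience methods: each slices out the needed bytes and calls the matching Buffer method on the result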
;(function () {
var methods = {
'readDoubleBE' : 8
, 'readDoubleLE' : 8
, 'readFloatBE' : 4
, 'readFloatLE' : 4
, 'readInt32BE' : 4
, 'readInt32LE' : 4
, 'readUInt32BE' : 4
, 'readUInt32LE' : 4
, 'readInt16BE' : 2
, 'readInt16LE' : 2
, 'readUInt16BE' : 2
, 'readUInt16LE' : 2
, 'readInt8' : 1
, 'readUInt8' : 1
}
for (var m in methods) {
(function (m) {
BufferList.prototype[m] = function (offset) {
return this.slice(offset, offset + methods[m])[m](0)
}
}(m))
}
}())
module.exports = BufferList
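For orientation, a minimal usage sketch of the BufferList API implemented above (the values shown are what the methods defined here would return):

```js
var BufferList = require('bl')

var bl = new BufferList()
bl.append(Buffer.from('abcd'))
bl.append(Buffer.from('efg'))

bl.length                  // => 7
bl.slice(2, 5).toString()  // => 'cde' (slices read across buffer boundaries)
bl.readUInt8(0)            // => 0x61, the char code of 'a'
bl.consume(4)              // drop the first four bytes ('abcd')
bl.toString()              // => 'efg'
```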
{
"name": "bl",
"version": "1.2.3",
"description": "Buffer List: collect buffers and access with a standard readable Buffer interface, streamable too!",
"main": "bl.js",
"scripts": {
"test": "node test/test.js | faucet"
},
"repository": {
"type": "git",
"url": "https://github.com/rvagg/bl.git"
},
"homepage": "https://github.com/rvagg/bl",
"authors": [
"Rod Vagg <rod@vagg.org> (https://github.com/rvagg)",
"Matteo Collina <matteo.collina@gmail.com> (https://github.com/mcollina)",
"Jarett Cruger <jcrugzz@gmail.com> (https://github.com/jcrugzz)"
],
"keywords": [
"buffer",
"buffers",
"stream",
"awesomesauce"
],
"license": "MIT",
"dependencies": {
"readable-stream": "^2.3.5",
"safe-buffer": "^5.1.1"
},
"devDependencies": {
"faucet": "0.0.1",
"hash_file": "~0.1.1",
"tape": "~4.9.0"
}
}
function allocUnsafe (size) {
if (typeof size !== 'number') {
throw new TypeError('"size" argument must be a number')
}
if (size < 0) {
throw new RangeError('"size" argument must not be negative')
}
if (Buffer.allocUnsafe) {
return Buffer.allocUnsafe(size)
} else {
return new Buffer(size)
}
}
module.exports = allocUnsafe
{
"name": "buffer-alloc-unsafe",
"version": "1.1.0",
"license": "MIT",
"repository": "LinusU/buffer-alloc-unsafe",
"files": [
"index.js"
],
"scripts": {
"test": "standard && node test"
},
"devDependencies": {
"standard": "^7.1.2"
},
"keywords": [
"allocUnsafe",
"allocate",
"buffer allocUnsafe",
"buffer unsafe allocate",
"buffer",
"ponyfill",
"unsafe allocate"
]
}
# Buffer Alloc Unsafe
A [ponyfill](https://ponyfill.com) for `Buffer.allocUnsafe`.
Works as Node.js: `v7.0.0` (matches the built-in behavior) <br>
Works on Node.js: `v0.10.0` and later
## Installation
```sh
npm install --save buffer-alloc-unsafe
```
## Usage
```js
const allocUnsafe = require('buffer-alloc-unsafe')
console.log(allocUnsafe(10))
//=> <Buffer 78 0c 80 03 01 00 00 00 05 00>
console.log(allocUnsafe(10))
//=> <Buffer 58 ed bf 5f ff 7f 00 00 01 00>
console.log(allocUnsafe(10))
//=> <Buffer 50 0c 80 03 01 00 00 00 0a 00>
allocUnsafe(-10)
//=> RangeError: "size" argument must not be negative
```
## API
### allocUnsafe(size)
- `size` &lt;Integer&gt; The desired length of the new `Buffer`
Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must be
less than or equal to the value of `buffer.kMaxLength` and greater than or equal
to zero. Otherwise, a `RangeError` is thrown.
## See also
- [buffer-alloc](https://github.com/LinusU/buffer-alloc) A ponyfill for `Buffer.alloc`
- [buffer-fill](https://github.com/LinusU/buffer-fill) A ponyfill for `Buffer.fill`
- [buffer-from](https://github.com/LinusU/buffer-from) A ponyfill for `Buffer.from`
var bufferFill = require('buffer-fill')
var allocUnsafe = require('buffer-alloc-unsafe')
module.exports = function alloc (size, fill, encoding) {
if (typeof size !== 'number') {
throw new TypeError('"size" argument must be a number')
}
if (size < 0) {
throw new RangeError('"size" argument must not be negative')
}
if (Buffer.alloc) {
return Buffer.alloc(size, fill, encoding)
}
var buffer = allocUnsafe(size)
if (size === 0) {
return buffer
}
if (fill === undefined) {
return bufferFill(buffer, 0)
}
if (typeof encoding !== 'string') {
encoding = undefined
}
return bufferFill(buffer, fill, encoding)
}
{
"name": "buffer-alloc",
"version": "1.2.0",
"license": "MIT",
"repository": "LinusU/buffer-alloc",
"files": [
"index.js"
],
"scripts": {
"test": "standard && node test"
},
"dependencies": {
"buffer-alloc-unsafe": "^1.1.0",
"buffer-fill": "^1.0.0"
},
"devDependencies": {
"standard": "^7.1.2"
},
"keywords": [
"alloc",
"allocate",
"buffer alloc",
"buffer allocate",
"buffer"
]
}
# Buffer Alloc
A [ponyfill](https://ponyfill.com) for `Buffer.alloc`.
Works as Node.js: `v7.0.0` (matches the built-in behavior) <br>
Works on Node.js: `v0.10.0` and later
## Installation
```sh
npm install --save buffer-alloc
```
## Usage
```js
const alloc = require('buffer-alloc')
console.log(alloc(4))
//=> <Buffer 00 00 00 00>
console.log(alloc(6, 0x41))
//=> <Buffer 41 41 41 41 41 41>
console.log(alloc(10, 'linus', 'utf8'))
//=> <Buffer 6c 69 6e 75 73 6c 69 6e 75 73>
```
## API
### alloc(size[, fill[, encoding]])
- `size` &lt;Integer&gt; The desired length of the new `Buffer`
- `fill` &lt;String&gt; | &lt;Buffer&gt; | &lt;Integer&gt; A value to pre-fill the new `Buffer` with. **Default:** `0`
- `encoding` &lt;String&gt; If `fill` is a string, this is its encoding. **Default:** `'utf8'`
Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the `Buffer` will be zero-filled.
## See also
- [buffer-alloc-unsafe](https://github.com/LinusU/buffer-alloc-unsafe) A ponyfill for `Buffer.allocUnsafe`
- [buffer-fill](https://github.com/LinusU/buffer-fill) A ponyfill for `Buffer.fill`
- [buffer-from](https://github.com/LinusU/buffer-from) A ponyfill for `Buffer.from`
The MIT License
Copyright (c) 2013 Brian J. Brennan
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# buffer-crc32
[![Build Status](https://secure.travis-ci.org/brianloveswords/buffer-crc32.png?branch=master)](http://travis-ci.org/brianloveswords/buffer-crc32)
crc32 that works with binary data and fancy character sets; outputs a
buffer, signed or unsigned integers, and has tests.
Derived from the sample CRC implementation in the PNG specification: http://www.w3.org/TR/PNG/#D-CRCAppendix
# install
```
npm install buffer-crc32
```
# example
```js
var crc32 = require('buffer-crc32');
// works with buffers
var buf = Buffer.from([0x00, 0x73, 0x75, 0x70, 0x20, 0x62, 0x72, 0x6f, 0x00])
crc32(buf) // -> <Buffer 94 5a ab 4a>
// has convenience methods for getting signed or unsigned ints
crc32.signed(buf) // -> -1805997238
crc32.unsigned(buf) // -> 2488970058
// will cast to buffer if given a string, so you can
// directly use foreign characters safely
crc32('自動販売機') // -> <Buffer cb 03 1a c5>
// and works in append mode too
var partialCrc = crc32('hey');
partialCrc = crc32(' ', partialCrc);
partialCrc = crc32('sup', partialCrc);
partialCrc = crc32(' ', partialCrc);
var finalCrc = crc32('bros', partialCrc); // -> <Buffer 47 fa 55 70>
```
# tests
This was tested against the output of zlib's crc32 method. You can run
the tests with `npm test` (requires tap).
# see also
https://github.com/alexgorbatchev/node-crc: `crc.buffer.crc32` also
supports buffer inputs and returns unsigned ints (thanks @tjholowaychuk).
# license
MIT/X11
var Buffer = require('buffer').Buffer;
var CRC_TABLE = [
0x00000000, 0x77073096, 0xee0e612c, 0x990951ba, 0x076dc419,
0x706af48f, 0xe963a535, 0x9e6495a3, 0x0edb8832, 0x79dcb8a4,
0xe0d5e91e, 0x97d2d988, 0x09b64c2b, 0x7eb17cbd, 0xe7b82d07,
0x90bf1d91, 0x1db71064, 0x6ab020f2, 0xf3b97148, 0x84be41de,
0x1adad47d, 0x6ddde4eb, 0xf4d4b551, 0x83d385c7, 0x136c9856,
0x646ba8c0, 0xfd62f97a, 0x8a65c9ec, 0x14015c4f, 0x63066cd9,
0xfa0f3d63, 0x8d080df5, 0x3b6e20c8, 0x4c69105e, 0xd56041e4,
0xa2677172, 0x3c03e4d1, 0x4b04d447, 0xd20d85fd, 0xa50ab56b,
0x35b5a8fa, 0x42b2986c, 0xdbbbc9d6, 0xacbcf940, 0x32d86ce3,
0x45df5c75, 0xdcd60dcf, 0xabd13d59, 0x26d930ac, 0x51de003a,
0xc8d75180, 0xbfd06116, 0x21b4f4b5, 0x56b3c423, 0xcfba9599,
0xb8bda50f, 0x2802b89e, 0x5f058808, 0xc60cd9b2, 0xb10be924,
0x2f6f7c87, 0x58684c11, 0xc1611dab, 0xb6662d3d, 0x76dc4190,
0x01db7106, 0x98d220bc, 0xefd5102a, 0x71b18589, 0x06b6b51f,
0x9fbfe4a5, 0xe8b8d433, 0x7807c9a2, 0x0f00f934, 0x9609a88e,
0xe10e9818, 0x7f6a0dbb, 0x086d3d2d, 0x91646c97, 0xe6635c01,
0x6b6b51f4, 0x1c6c6162, 0x856530d8, 0xf262004e, 0x6c0695ed,
0x1b01a57b, 0x8208f4c1, 0xf50fc457, 0x65b0d9c6, 0x12b7e950,
0x8bbeb8ea, 0xfcb9887c, 0x62dd1ddf, 0x15da2d49, 0x8cd37cf3,
0xfbd44c65, 0x4db26158, 0x3ab551ce, 0xa3bc0074, 0xd4bb30e2,
0x4adfa541, 0x3dd895d7, 0xa4d1c46d, 0xd3d6f4fb, 0x4369e96a,
0x346ed9fc, 0xad678846, 0xda60b8d0, 0x44042d73, 0x33031de5,
0xaa0a4c5f, 0xdd0d7cc9, 0x5005713c, 0x270241aa, 0xbe0b1010,
0xc90c2086, 0x5768b525, 0x206f85b3, 0xb966d409, 0xce61e49f,
0x5edef90e, 0x29d9c998, 0xb0d09822, 0xc7d7a8b4, 0x59b33d17,
0x2eb40d81, 0xb7bd5c3b, 0xc0ba6cad, 0xedb88320, 0x9abfb3b6,
0x03b6e20c, 0x74b1d29a, 0xead54739, 0x9dd277af, 0x04db2615,
0x73dc1683, 0xe3630b12, 0x94643b84, 0x0d6d6a3e, 0x7a6a5aa8,
0xe40ecf0b, 0x9309ff9d, 0x0a00ae27, 0x7d079eb1, 0xf00f9344,
0x8708a3d2, 0x1e01f268, 0x6906c2fe, 0xf762575d, 0x806567cb,
0x196c3671, 0x6e6b06e7, 0xfed41b76, 0x89d32be0, 0x10da7a5a,
0x67dd4acc, 0xf9b9df6f, 0x8ebeeff9, 0x17b7be43, 0x60b08ed5,
0xd6d6a3e8, 0xa1d1937e, 0x38d8c2c4, 0x4fdff252, 0xd1bb67f1,
0xa6bc5767, 0x3fb506dd, 0x48b2364b, 0xd80d2bda, 0xaf0a1b4c,
0x36034af6, 0x41047a60, 0xdf60efc3, 0xa867df55, 0x316e8eef,
0x4669be79, 0xcb61b38c, 0xbc66831a, 0x256fd2a0, 0x5268e236,
0xcc0c7795, 0xbb0b4703, 0x220216b9, 0x5505262f, 0xc5ba3bbe,
0xb2bd0b28, 0x2bb45a92, 0x5cb36a04, 0xc2d7ffa7, 0xb5d0cf31,
0x2cd99e8b, 0x5bdeae1d, 0x9b64c2b0, 0xec63f226, 0x756aa39c,
0x026d930a, 0x9c0906a9, 0xeb0e363f, 0x72076785, 0x05005713,
0x95bf4a82, 0xe2b87a14, 0x7bb12bae, 0x0cb61b38, 0x92d28e9b,
0xe5d5be0d, 0x7cdcefb7, 0x0bdbdf21, 0x86d3d2d4, 0xf1d4e242,
0x68ddb3f8, 0x1fda836e, 0x81be16cd, 0xf6b9265b, 0x6fb077e1,
0x18b74777, 0x88085ae6, 0xff0f6a70, 0x66063bca, 0x11010b5c,
0x8f659eff, 0xf862ae69, 0x616bffd3, 0x166ccf45, 0xa00ae278,
0xd70dd2ee, 0x4e048354, 0x3903b3c2, 0xa7672661, 0xd06016f7,
0x4969474d, 0x3e6e77db, 0xaed16a4a, 0xd9d65adc, 0x40df0b66,
0x37d83bf0, 0xa9bcae53, 0xdebb9ec5, 0x47b2cf7f, 0x30b5ffe9,
0xbdbdf21c, 0xcabac28a, 0x53b39330, 0x24b4a3a6, 0xbad03605,
0xcdd70693, 0x54de5729, 0x23d967bf, 0xb3667a2e, 0xc4614ab8,
0x5d681b02, 0x2a6f2b94, 0xb40bbe37, 0xc30c8ea1, 0x5a05df1b,
0x2d02ef8d
];
if (typeof Int32Array !== 'undefined') {
CRC_TABLE = new Int32Array(CRC_TABLE);
}
function ensureBuffer(input) {
if (Buffer.isBuffer(input)) {
return input;
}
var hasNewBufferAPI =
typeof Buffer.alloc === "function" &&
typeof Buffer.from === "function";
if (typeof input === "number") {
return hasNewBufferAPI ? Buffer.alloc(input) : new Buffer(input);
}
else if (typeof input === "string") {
return hasNewBufferAPI ? Buffer.from(input) : new Buffer(input);
}
else {
throw new Error("input must be buffer, number, or string, received " +
typeof input);
}
}
function bufferizeInt(num) {
var tmp = ensureBuffer(4);
tmp.writeInt32BE(num, 0);
return tmp;
}
function _crc32(buf, previous) {
buf = ensureBuffer(buf);
if (Buffer.isBuffer(previous)) {
previous = previous.readUInt32BE(0);
}
var crc = ~~previous ^ -1;
for (var n = 0; n < buf.length; n++) {
crc = CRC_TABLE[(crc ^ buf[n]) & 0xff] ^ (crc >>> 8);
}
return (crc ^ -1);
}
function crc32() {
return bufferizeInt(_crc32.apply(null, arguments));
}
crc32.signed = function () {
return _crc32.apply(null, arguments);
};
crc32.unsigned = function () {
return _crc32.apply(null, arguments) >>> 0;
};
module.exports = crc32;
{
"author": "Brian J. Brennan <brianloveswords@gmail.com>",
"name": "buffer-crc32",
"description": "A pure javascript CRC32 algorithm that plays nice with binary data",
"version": "0.2.13",
"licenses": [
{
"type": "MIT",
"url": "https://github.com/brianloveswords/buffer-crc32/raw/master/LICENSE"
}
],
"contributors": [
{
"name": "Vladimir Kuznetsov",
"github": "mistakster"
}
],
"homepage": "https://github.com/brianloveswords/buffer-crc32",
"repository": {
"type": "git",
"url": "git://github.com/brianloveswords/buffer-crc32.git"
},
"main": "index.js",
"scripts": {
"test": "./node_modules/.bin/tap tests/*.test.js"
},
"dependencies": {},
"devDependencies": {
"tap": "~0.2.5"
},
"optionalDependencies": {},
"engines": {
"node": "*"
},
"license": "MIT",
"files": [
"index.js"
]
}
/* Node.js 6.4.0 and up has full support */
var hasFullSupport = (function () {
try {
if (!Buffer.isEncoding('latin1')) {
return false
}
var buf = Buffer.alloc ? Buffer.alloc(4) : new Buffer(4)
buf.fill('ab', 'ucs2')
return (buf.toString('hex') === '61006200')
} catch (_) {
return false
}
}())
function isSingleByte (val) {
return (val.length === 1 && val.charCodeAt(0) < 256)
}
function fillWithNumber (buffer, val, start, end) {
if (start < 0 || end > buffer.length) {
throw new RangeError('Out of range index')
}
start = start >>> 0
end = end === undefined ? buffer.length : end >>> 0
if (end > start) {
buffer.fill(val, start, end)
}
return buffer
}
function fillWithBuffer (buffer, val, start, end) {
if (start < 0 || end > buffer.length) {
throw new RangeError('Out of range index')
}
if (end <= start) {
return buffer
}
start = start >>> 0
end = end === undefined ? buffer.length : end >>> 0
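// tile val across [start, end), then copy any partial remainder that still fits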
var pos = start
var len = val.length
while (pos <= (end - len)) {
val.copy(buffer, pos)
pos += len
}
if (pos !== end) {
val.copy(buffer, pos, 0, end - pos)
}
return buffer
}
function fill (buffer, val, start, end, encoding) {
if (hasFullSupport) {
return buffer.fill(val, start, end, encoding)
}
if (typeof val === 'number') {
return fillWithNumber(buffer, val, start, end)
}
if (typeof val === 'string') {
if (typeof start === 'string') {
encoding = start
start = 0
end = buffer.length
} else if (typeof end === 'string') {
encoding = end
end = buffer.length
}
if (encoding !== undefined && typeof encoding !== 'string') {
throw new TypeError('encoding must be a string')
}
if (encoding === 'latin1') {
encoding = 'binary'
}
if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) {
throw new TypeError('Unknown encoding: ' + encoding)
}
if (val === '') {
return fillWithNumber(buffer, 0, start, end)
}
if (isSingleByte(val)) {
return fillWithNumber(buffer, val.charCodeAt(0), start, end)
}
val = new Buffer(val, encoding)
}
if (Buffer.isBuffer(val)) {
return fillWithBuffer(buffer, val, start, end)
}
// Other values (e.g. undefined, boolean, object) results in zero-fill
return fillWithNumber(buffer, 0, start, end)
}
module.exports = fill
{
"name": "buffer-fill",
"version": "1.0.0",
"license": "MIT",
"repository": "LinusU/buffer-fill",
"files": [
"index.js"
],
"scripts": {
"test": "standard && node test"
},
"devDependencies": {
"buffer-alloc-unsafe": "^1.1.0",
"standard": "^7.1.2"
}
}
# Buffer Fill
A [ponyfill](https://ponyfill.com) for `Buffer.fill`.
Works as Node.js: `v6.4.0` (matches the built-in behavior) <br>
Works on Node.js: `v0.10.0` and later
## Installation
```sh
npm install --save buffer-fill
```
## Usage
```js
const fill = require('buffer-fill')
const buf = Buffer.allocUnsafe(5)
console.log(buf.fill(8))
//=> <Buffer 08 08 08 08 08>
console.log(buf.fill(9, 2, 4))
//=> <Buffer 08 08 09 09 08>
console.log(buf.fill('linus', 'latin1'))
//=> <Buffer 6c 69 6e 75 73>
console.log(buf.fill('\u0222'))
//=> <Buffer c8 a2 c8 a2 c8>
```
## API
### fill(buf, value[, offset[, end]][, encoding])
- `value` &lt;String&gt; | &lt;Buffer&gt; | &lt;Integer&gt; The value to fill `buf` with
- `offset` &lt;Integer&gt; Where to start filling `buf`. **Default:** `0`
- `end` &lt;Integer&gt; Where to stop filling `buf` (not inclusive). **Default:** `buf.length`
- `encoding` &lt;String&gt; If `value` is a string, this is its encoding. **Default:** `'utf8'`
- Return: &lt;Buffer&gt; A reference to `buf`
Fills `buf` with the specified `value`. If the `offset` and `end` are not given,
the entire `buf` will be filled. This is meant to be a small simplification to
allow the creation and filling of a `Buffer` to be done on a single line.
If the final write of a `fill()` operation falls on a multi-byte character, then
only the first bytes of that character that fit into `buf` are written.
## See also
- [buffer-alloc-unsafe](https://github.com/LinusU/buffer-alloc-unsafe) A ponyfill for `Buffer.allocUnsafe`
- [buffer-alloc](https://github.com/LinusU/buffer-alloc) A ponyfill for `Buffer.alloc`
- [buffer-from](https://github.com/LinusU/buffer-from) A ponyfill for `Buffer.from`
1.5.1 / 2020-05-11
==================
**fixes**
* [[`f516814`](http://github.com/node-modules/compressing/commit/f51681490aeea44a7b27ec0c09d3fb3d0385c5c0)] - fix: index.d.ts streamHeader.name wrong declearing (#46) (shadyzoz <<ShadyZOZ@users.noreply.github.com>>)
1.5.0 / 2019-12-04
==================
**features**
* [[`15c29e9`](http://github.com/node-modules/compressing/commit/15c29e9893880d2c19c343d133edb50f0c55c713)] - feat: zip format support custom fileName encoding (#36) (fengmk2 <<fengmk2@gmail.com>>)
**fixes**
* [[`7d605fe`](http://github.com/node-modules/compressing/commit/7d605fe01a88bc6aab9a2b06a8725545f591bab9)] - fix: typescript error(#29) (Ruanyq <<yiqiang0930@163.com>>)
**others**
* [[`4808fb8`](http://github.com/node-modules/compressing/commit/4808fb8e1d6cbbb31c0e82c359ec04eccb0c1eaf)] - test: add node 11 (#20) (fengmk2 <<fengmk2@gmail.com>>)
1.4.0 / 2018-11-30
==================
**others**
* [[`1f352c8`](http://github.com/node-modules/compressing/commit/1f352c88028acf27c1881fd45d555094cb279c44)] - docs: add index.d.ts and test case (#17) (DiamondYuan <<541832074@qq.com>>)
1.3.2 / 2018-11-21
==================
**fixes**
* [[`3713a0b`](http://github.com/node-modules/compressing/commit/3713a0b8d5b03d61c111afbbd4b6226169afeb14)] - fix: handle error from yazl when file not exists (#19) (DiamondYuan <<541832074@qq.com>>)
1.3.1 / 2018-08-24
==================
**fixes**
* [[`b802819`](http://github.com/node-modules/compressing/commit/b8028195dd6e7200ff47c8f43f695d24838e986b)] - fix: keep stat mode when compress tar or tgz (#11) (Haoliang Gao <<sakura9515@gmail.com>>)
1.3.0 / 2018-08-13
==================
**features**
* [[`04feafa`](http://github.com/node-modules/compressing/commit/04feafa6a290d877044ed162ca4c7dcdc5e54e87)] - feat: support absolute path zip file (#10) (fengmk2 <<fengmk2@gmail.com>>)
1.2.4 / 2018-07-13
==================
* chore: replace multipipe with pump (#9)
1.2.3 / 2017-07-27
==================
* fix: should resolve when all fileWriteStream finished (#7)
1.2.2 / 2017-07-06
==================
* fix: make file mode correct (#6)
1.2.1 / 2017-07-01
==================
* test: fix test on Windows (#4)
1.2.0 / 2017-07-01
==================
* feat: add strip option when uncompress zip
1.1.0 / 2017-02-14
==================
* feat: uncompress (#2)
1.0.0 / 2016-12-24
==================
* rename to compressing
* feat: 1st implementation
* init
MIT License
Copyright (c) 2017-present node-modules and other contributors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
import { ReadStream, WriteStream } from 'fs'
type sourceType = string | Buffer | ReadStream
type destType = string | WriteStream
interface streamEntryOpts {
relativePath?: string
ignoreBase?: boolean
size?: number
}
interface streamHeader {
type: 'file' | 'directory',
name: string
}
export namespace gzip {
function compressFile(source: sourceType, dest: destType, opts?: any): Promise<void>
function uncompress(source: sourceType, dest: destType, opts?: any): Promise<void>
export class FileStream extends ReadStream {
constructor(opts?: {
zlib?: object,
source: sourceType
});
}
export class UncompressStream extends WriteStream {
constructor(opts?: {
zlib?: object,
source: sourceType
});
on(event: string, listener: (...args: any[]) => void): this
on(event: 'error', listener: (err: Error) => void): this
}
}
export namespace tar {
function compressFile(source: sourceType, dest: destType, opts?: any): Promise<void>
function compressDir(source: sourceType, dest: destType, opts?: any): Promise<void>
function uncompress(source: sourceType, dest: string, opts?: any): Promise<void>
export class Stream extends ReadStream {
constructor();
addEntry(entry: string, opts?: streamEntryOpts): void
addEntry(entry: Buffer | ReadStream, opts: streamEntryOpts): void
}
export class FileStream extends ReadStream {
constructor(opts?: {
relativePath?: string,
size?: number,
suppressSizeWarning?: boolean,
source?: sourceType
});
}
export class UncompressStream extends WriteStream {
constructor(opts?: {
source: sourceType
});
on(event: string, listener: (...args: any[]) => void): this
on(event: 'entry', listener: (header: streamHeader, stream: WriteStream, next: () => void) => void): this
on(event: 'finish', listener: () => void): this
on(event: 'error', listener: (err: Error) => void): this
}
}
export namespace tgz {
function compressFile(source: sourceType, dest: destType, opts?: any): Promise<void>
function compressDir(source: sourceType, dest: destType, opts?: any): Promise<void>
function uncompress(source: sourceType, dest: string, opts?: any): Promise<void>
export class Stream extends ReadStream {
constructor();
addEntry(entry: string, opts?: streamEntryOpts): void
addEntry(entry: Buffer | ReadStream, opts: streamEntryOpts): void
}
export class FileStream extends ReadStream {
constructor(opts?: {
relativePath?: string,
size?: number,
suppressSizeWarning?: boolean,
zlib?: object,
source?: sourceType
});
}
export class UncompressStream extends WriteStream {
constructor(opts?: {
source?: sourceType,
strip?: number
});
on(event: string, listener: (...args: any[]) => void): this
on(event: 'entry', listener: (header: streamHeader, stream: WriteStream, next: () => void) => void): this
on(event: 'finish', listener: () => void): this
on(event: 'error', listener: (err: Error) => void): this
}
}
export namespace zip {
function compressFile(source: sourceType, dest: destType, opts?: any): Promise<void>
function compressDir(source: sourceType, dest: destType, opts?: any): Promise<void>
function uncompress(source: sourceType, dest: string, opts?: any): Promise<void>
export class Stream extends ReadStream {
constructor();
addEntry(entry: string, opts?: streamEntryOpts): void
addEntry(entry: Buffer | ReadStream, opts: streamEntryOpts): void
}
export class FileStream extends ReadStream {
/**
* If opts.source is a file path, opts.relativePath is optional, otherwise it's required.
*
* @param opts
*/
constructor(opts?: {
relativePath?: string,
yazl?: object,
source: string
} | {
relativePath: string,
yazl?: object,
source?: Buffer | ReadStream
});
}
export class UncompressStream extends WriteStream {
constructor(opts?: {
source?: sourceType,
strip?: number,
zipFileNameEncoding?: string
});
on(event: string, listener: (...args: any[]) => void): this
on(event: 'entry', listener: (header: streamHeader, stream: WriteStream, next: () => void) => void): this
on(event: 'finish', listener: () => void): this
on(event: 'error', listener: (err: Error) => void): this
}
}
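A minimal usage sketch of the API declared above; the file and directory names are hypothetical:

```js
const compressing = require('compressing');
const fs = require('fs');

// one-shot helpers return promises
compressing.tgz.compressDir('sourceDir', 'sourceDir.tgz')
  .then(() => compressing.tgz.uncompress('sourceDir.tgz', 'destDir'))
  .catch(console.error);

// or build an archive entry by entry with the streaming API
const tarStream = new compressing.tar.Stream();
tarStream.addEntry('file.txt');
tarStream.pipe(fs.createWriteStream('out.tar'));
```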
'use strict';
exports.zip = require('./lib/zip');
exports.gzip = require('./lib/gzip');
exports.tar = require('./lib/tar');
exports.tgz = require('./lib/tgz');
'use strict';
const stream = require('stream');
class BaseStream extends stream.Readable {
addEntry(/* entry, opts */) {
throw new Error('.addEntry not implemented in sub class!');
}
_read() {}
emit(event, data) {
if (event === 'error') {
const error = data;
if (error.name === 'Error') {
error.name = this.constructor.name + 'Error';
}
}
super.emit(event, data);
}
}
module.exports = BaseStream;
'use strict';
const stream = require('stream');
class UncompressBaseStream extends stream.Writable {
emit(event, data) {
if (event === 'error') {
const error = data;
if (error.name === 'Error') {
error.name = this.constructor.name + 'Error';
}
}
super.emit.apply(this, arguments);
}
}
module.exports = UncompressBaseStream;
'use strict';
const fs = require('fs');
const zlib = require('zlib');
const utils = require('../utils');
const streamifier = require('streamifier');
class GzipFileStream extends zlib.Gzip {
constructor(opts) {
opts = opts || {};
super(opts.zlib);
const sourceType = utils.sourceType(opts.source);
if (sourceType === 'file') {
const stream = fs.createReadStream(opts.source, opts.fs);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
return;
}
if (sourceType === 'buffer') {
const stream = streamifier.createReadStream(opts.source, opts.streamifier);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
return;
}
if (sourceType === 'stream') {
opts.source.on('error', err => this.emit('error', err));
opts.source.pipe(this);
}
// else undefined: do nothing
}
}
module.exports = GzipFileStream;
'use strict';
const utils = require('../utils');
const GzipFileStream = require('./file_stream');
const GzipUncompressStream = require('./uncompress_stream');
exports.FileStream = GzipFileStream;
exports.UncompressStream = GzipUncompressStream;
exports.compressFile = utils.makeFileProcessFn(GzipFileStream);
exports.uncompress = utils.makeFileProcessFn(GzipUncompressStream);
'use strict';
const fs = require('fs');
const zlib = require('zlib');
const utils = require('../utils');
const streamifier = require('streamifier');
class GzipUncompressStream extends zlib.Unzip {
constructor(opts) {
opts = opts || {};
super(opts.zlib);
const sourceType = utils.sourceType(opts.source);
if (sourceType === 'file') {
const stream = fs.createReadStream(opts.source, opts.fs);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
return;
}
if (sourceType === 'buffer') {
const stream = streamifier.createReadStream(opts.source, opts.streamifier);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
return;
}
if (sourceType === 'stream') {
opts.source.on('error', err => this.emit('error', err));
opts.source.pipe(this);
}
// else: waiting to be piped
}
}
module.exports = GzipUncompressStream;
'use strict';
const fs = require('fs');
const path = require('path');
const stream = require('stream');
const tar = require('tar-stream');
const utils = require('../utils');
const ready = require('get-ready');
class TarFileStream extends stream.Transform {
constructor(opts) {
super(opts);
const pack = tar.pack();
pack.on('data', chunk => this.push(chunk));
pack.on('end', () => this.ready(true));
const sourceType = utils.sourceType(opts.source);
if (sourceType === 'file') {
// stat file to get file size
fs.stat(opts.source, (err, stat) => {
if (err) return this.emit('error', err);
this.entry = pack.entry({ name: opts.relativePath || path.basename(opts.source), size: stat.size, mode: stat.mode & 0o777 }, err => {
if (err) return this.emit('error', err);
pack.finalize();
});
const stream = fs.createReadStream(opts.source, opts.fs);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
});
} else if (sourceType === 'buffer') {
if (!opts.relativePath) return this.emit('error', new Error('opts.relativePath is required if opts.source is a buffer'));
pack.entry({ name: opts.relativePath }, opts.source);
pack.finalize();
this.end();
} else { // stream or undefined
if (!opts.relativePath) return process.nextTick(() => this.emit('error', new Error('opts.relativePath is required')));
if (opts.size) {
this.entry = pack.entry({ name: opts.relativePath, size: opts.size }, err => {
if (err) return this.emit('error', err);
pack.finalize();
});
} else {
if (!opts.suppressSizeWarning) {
console.warn('You should specify the size of streaming data via opts.size to prevent all streaming data from being loaded into memory. If you are sure about the memory cost, pass opts.suppressSizeWarning: true to suppress this warning');
}
const buf = [];
this.entry = new stream.Writable({
write(chunk, _, callback) {
buf.push(chunk);
callback();
},
});
this.entry.on('finish', () => {
pack.entry({ name: opts.relativePath }, Buffer.concat(buf));
pack.finalize();
});
}
if (sourceType === 'stream') {
opts.source.on('error', err => this.emit('error', err));
opts.source.pipe(this);
}
}
}
_transform(chunk, encoding, callback) {
if (this.entry) {
this.entry.write(chunk, encoding, callback);
}
}
_flush(callback) {
if (this.entry) {
this.entry.end();
}
this.ready(callback);
}
}
ready.mixin(TarFileStream.prototype);
module.exports = TarFileStream;
'use strict';
const utils = require('../utils');
const TarStream = require('./stream');
const TarFileStream = require('./file_stream');
const TarUncompressStream = require('./uncompress_stream');
exports.Stream = TarStream;
exports.FileStream = TarFileStream;
exports.UncompressStream = TarUncompressStream;
exports.compressDir = utils.makeCompressDirFn(TarStream);
exports.compressFile = utils.makeFileProcessFn(TarFileStream);
exports.uncompress = utils.makeUncompressFn(TarUncompressStream);
'use strict';
const fs = require('fs');
const path = require('path');
const stream = require('stream');
const tar = require('tar-stream');
const utils = require('../utils');
const BaseStream = require('../base_stream');
class TarStream extends BaseStream {
constructor(opts) {
super(opts);
this._waitingEntries = [];
this._processing = false;
this._init(opts);
}
_init() {
const pack = this._pack = tar.pack();
pack.on('end', () => this.push(null));
pack.on('data', chunk => this.push(chunk));
pack.on('error', err => this.emit('error', err));
}
addEntry(entry, opts) {
if (this._processing) {
return this._waitingEntries.push([ entry, opts ]);
}
opts = opts || {};
this._processing = true;
const entryType = utils.entryType(entry);
if (!entryType) return; // TODO
if (entryType === 'fileOrDir') {
this._addFileOrDirEntry(entry, opts);
} else if (entryType === 'buffer') {
this._addBufferEntry(entry, opts);
} else { // stream
this._addStreamEntry(entry, opts);
}
}
_addFileOrDirEntry(entry, opts) {
fs.stat(entry, (err, stat) => {
if (err) return this.emit('error', err);
if (stat.isDirectory()) return this._addDirEntry(entry, opts);
if (stat.isFile()) return this._addFileEntry(entry, opts);
const illigalEntryError = new Error('Type is not supported, must be a file path, directory path, file buffer, or a readable stream');
illigalEntryError.name = 'IlligalEntryError';
this.emit('error', illigalEntryError);
});
}
_addFileEntry(entry, opts) {
// stat file to get file size
fs.stat(entry, (err, stat) => {
if (err) return this.emit('error', err);
const entryStream = this._pack.entry({ name: opts.relativePath || path.basename(entry), size: stat.size, mode: stat.mode & 0o777 }, this._onEntryFinish.bind(this));
const stream = fs.createReadStream(entry, opts.fs);
stream.on('error', err => this.emit('error', err));
stream.pipe(entryStream);
});
}
_addDirEntry(entry, opts) {
fs.readdir(entry, (err, files) => {
if (err) return this.emit('error', err);
const relativePath = opts.relativePath || '';
files.forEach(fileOrDir => {
const newOpts = utils.clone(opts);
if (opts.ignoreBase) {
newOpts.relativePath = path.join(relativePath, fileOrDir);
} else {
newOpts.relativePath = path.join(relativePath, path.basename(entry), fileOrDir);
}
newOpts.ignoreBase = true;
this.addEntry(path.join(entry, fileOrDir), newOpts);
});
this._onEntryFinish();
});
}
_addBufferEntry(entry, opts) {
if (!opts.relativePath) return this.emit('error', new Error('opts.relativePath is required if entry is a buffer'));
this._pack.entry({ name: opts.relativePath }, entry, this._onEntryFinish.bind(this));
}
_addStreamEntry(entry, opts) {
entry.on('error', err => this.emit('error', err));
if (!opts.relativePath) return this.emit('error', new Error('opts.relativePath is required'));
if (opts.size) {
const entryStream = this._pack.entry({ name: opts.relativePath, size: opts.size }, this._onEntryFinish.bind(this));
entry.pipe(entryStream);
} else {
if (!opts.suppressSizeWarning) {
console.warn('You should specify the size of streaming data via opts.size to prevent all streaming data from being loaded into memory. If you are sure about the memory cost, pass opts.suppressSizeWarning: true to suppress this warning');
}
const buf = [];
const collectStream = new stream.Writable({
write(chunk, _, callback) {
buf.push(chunk);
callback();
},
});
collectStream.on('error', err => this.emit('error', err));
collectStream.on('finish', () => {
this._pack.entry({ name: opts.relativePath }, Buffer.concat(buf), this._onEntryFinish.bind(this));
});
entry.pipe(collectStream);
}
}
_read() {}
_onEntryFinish(err) {
if (err) return this.emit('error', err);
this._processing = false;
const waitingEntry = this._waitingEntries.shift();
if (waitingEntry) {
this.addEntry.apply(this, waitingEntry);
} else {
this._finalize();
}
}
_finalize() {
this._pack.finalize();
}
}
module.exports = TarStream;
'use strict';
const fs = require('fs');
const tar = require('tar-stream');
const utils = require('../utils');
const streamifier = require('streamifier');
// stream.Writable
class TarUncompressStream extends tar.extract {
constructor(opts) {
opts = opts || {};
super(opts);
const sourceType = utils.sourceType(opts.source);
if (sourceType === 'file') {
const stream = fs.createReadStream(opts.source, opts.fs);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
return;
}
if (sourceType === 'buffer') {
const stream = streamifier.createReadStream(opts.source, opts.streamifier);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
return;
}
if (sourceType === 'stream') {
opts.source.on('error', err => this.emit('error', err));
opts.source.pipe(this);
}
// else: waiting to be piped
}
}
module.exports = TarUncompressStream;
'use strict';
const tar = require('../tar');
const gzip = require('../gzip');
const utils = require('../utils');
const stream = require('stream');
const pump = require('pump');
const ready = require('get-ready');
class TgzFileStream extends stream.Transform {
constructor(opts) {
opts = opts || {};
super(opts);
const sourceType = this._sourceType = utils.sourceType(opts.source);
const tarStream = this._tarStream = new tar.FileStream(opts);
opts = utils.clone(opts);
delete opts.source;
const gzipStream = new gzip.FileStream(opts);
gzipStream.on('data', chunk => {
this.push(chunk);
});
gzipStream.on('end', () => this.ready(true));
pump(tarStream, gzipStream, err => {
err && this.emit('error', err);
});
if (sourceType !== 'stream' && sourceType !== undefined) {
this.end();
}
}
_transform(chunk, encoding, callback) {
this._tarStream.write(chunk, encoding, callback);
}
_flush(callback) {
if (this._sourceType === 'stream' || this._sourceType === undefined) {
this._tarStream.end();
}
this.ready(callback);
}
}
ready.mixin(TgzFileStream.prototype);
module.exports = TgzFileStream;
'use strict';
const utils = require('../utils');
const TgzStream = require('./stream');
const TgzFileStream = require('./file_stream');
const TgzUncompressStream = require('./uncompress_stream');
exports.Stream = TgzStream;
exports.FileStream = TgzFileStream;
exports.UncompressStream = TgzUncompressStream;
exports.compressDir = utils.makeCompressDirFn(TgzStream);
exports.compressFile = utils.makeFileProcessFn(TgzFileStream);
exports.uncompress = utils.makeUncompressFn(TgzUncompressStream);
'use strict';
const tar = require('../tar');
const gzip = require('../gzip');
const BaseStream = require('../base_stream');
class TgzStream extends BaseStream {
constructor(opts) {
super(opts);
const tarStream = this._tarStream = new tar.Stream();
tarStream.on('error', err => this.emit('error', err));
const gzipStream = new gzip.FileStream();
gzipStream.on('end', () => this.push(null));
gzipStream.on('data', chunk => this.push(chunk));
gzipStream.on('error', err => this.emit('error', err));
tarStream.pipe(gzipStream);
}
addEntry(entry, opts) {
this._tarStream.addEntry(entry, opts);
}
}
module.exports = TgzStream;
'use strict';
const fs = require('fs');
const utils = require('../utils');
const ready = require('get-ready');
const streamifier = require('streamifier');
const FlushWritable = require('flushwritable');
const GzipUncompressStream = require('../gzip').UncompressStream;
const TarUncompressStream = require('../tar').UncompressStream;
class TgzUncompressStream extends FlushWritable {
constructor(opts) {
opts = opts || {};
super(opts);
const newOpts = utils.clone(opts);
newOpts.source = undefined;
this._gzipStream = new GzipUncompressStream(newOpts)
.on('error', err => this.emit('error', err));
const tarStream = new TarUncompressStream(newOpts)
.on('finish', () => this.ready(true))
.on('entry', this.emit.bind(this, 'entry'))
.on('error', err => this.emit('error', err));
this._gzipStream.pipe(tarStream);
const sourceType = utils.sourceType(opts.source);
if (sourceType === 'file') {
const stream = fs.createReadStream(opts.source, opts.fs);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
return;
}
if (sourceType === 'buffer') {
const stream = streamifier.createReadStream(opts.source, opts.streamifier);
stream.on('error', err => this.emit('error', err));
stream.pipe(this);
return;
}
if (sourceType === 'stream') {
opts.source.on('error', err => this.emit('error', err));
opts.source.pipe(this);
}
// else: waiting to be piped
}
_write(chunk, encoding, callback) {
this._gzipStream.write(chunk, encoding, callback);
}
_flush(callback) {
this._gzipStream.end();
this.ready(callback);
}
}
ready.mixin(TgzUncompressStream.prototype);
module.exports = TgzUncompressStream;
'use strict';
const fs = require('fs');
const path = require('path');
const mkdirp = require('mkdirp');
const pump = require('pump');
// file/fileBuffer/stream
exports.sourceType = source => {
if (!source) return undefined;
if (source instanceof Buffer) return 'buffer';
if (typeof source._read === 'function' || typeof source._transform === 'function') return 'stream';
if (typeof source !== 'string') {
const err = new Error('Type is not supported, must be a file path, file buffer, or a readable stream');
err.name = 'IlligalSourceError';
throw err;
}
return 'file';
};
function destType(dest) {
if (typeof dest._write === 'function' || typeof dest._transform === 'function') return 'stream';
if (typeof dest !== 'string') {
const err = new Error('Type is not supported, must be a file path, or a writable stream');
err.name = 'IlligalDestinationError';
throw err;
}
return 'path';
}
exports.destType = destType;
const illigalEntryError = new Error('Type is not supported, must be a file path, directory path, file buffer, or a readable stream');
illigalEntryError.name = 'IlligalEntryError';
// fileOrDir/fileBuffer/stream
exports.entryType = entry => {
if (!entry) return;
if (entry instanceof Buffer) return 'buffer';
if (typeof entry._read === 'function' || typeof entry._transform === 'function') return 'stream';
if (typeof entry !== 'string') throw illigalEntryError;
return 'fileOrDir';
};
exports.clone = obj => {
const newObj = {};
for (const i in obj) {
newObj[i] = obj[i];
}
return newObj;
};
exports.makeFileProcessFn = StreamClass => {
return (source, dest, opts) => {
opts = opts || {};
opts.source = source;
const destStream = destType(dest) === 'path' ? fs.createWriteStream(dest) : dest;
const compressStream = new StreamClass(opts);
return safePipe([ compressStream, destStream ]);
};
};
exports.makeCompressDirFn = StreamClass => {
return (dir, dest, opts) => {
const destStream = destType(dest) === 'path' ? fs.createWriteStream(dest) : dest;
const compressStream = new StreamClass();
compressStream.addEntry(dir, opts);
return safePipe([ compressStream, destStream ]);
};
};
exports.makeUncompressFn = StreamClass => {
return (source, destDir, opts) => {
opts = opts || {};
opts.source = source;
if (destType(destDir) !== 'path') {
const error = new Error('uncompress destination must be a directory');
error.name = 'IlligalDestError';
throw error;
}
return new Promise((resolve, reject) => {
mkdirp(destDir, err => {
if (err) return reject(err);
let entryCount = 0;
let successCount = 0;
let isFinish = false;
function done() {
// resolve when both stream finish and file write finish
if (isFinish && entryCount === successCount) resolve();
}
new StreamClass(opts)
.on('finish', () => {
isFinish = true;
done();
})
.on('error', reject)
.on('entry', (header, stream, next) => {
stream.on('end', next);
if (header.type === 'file') {
const fullpath = path.join(destDir, header.name);
mkdirp(path.dirname(fullpath), err => {
if (err) return reject(err);
entryCount++;
pump(stream, fs.createWriteStream(fullpath, { mode: header.mode }), err => {
if (err) return reject(err);
successCount++;
done();
});
});
} else { // directory
mkdirp(path.join(destDir, header.name), err => {
if (err) return reject(err);
stream.resume();
});
}
});
});
});
};
};
exports.streamToBuffer = stream => {
return new Promise((resolve, reject) => {
const chunks = [];
stream
.on('readable', () => {
let chunk;
while ((chunk = stream.read())) chunks.push(chunk);
})
.on('end', () => resolve(Buffer.concat(chunks)))
.on('error', err => reject(err));
});
};
function safePipe(streams) {
return new Promise((resolve, reject) => {
pump(streams[0], streams[1], err => {
if (err) return reject(err);
resolve();
});
});
}
exports.safePipe = safePipe;
exports.stripFileName = (strip, fileName, type) => {
// before
// node/package.json
// node/lib/index.js
//
// when strip 1
// package.json
// lib/index.js
//
// when strip 2
// package.json
// index.js
if (Buffer.isBuffer(fileName)) fileName = fileName.toString();
// use / instead of \\
if (fileName.indexOf('\\') !== -1) fileName = fileName.replace(/\\+/g, '/');
// fix absolute path
// /foo => foo
if (fileName[0] === '/') fileName = fileName.replace(/^\/+/, '');
let s = fileName.split('/');
// fix relative path
// foo/../bar/../../asdf/
// => asdf/
if (s.indexOf('..') !== -1) {
fileName = path.normalize(fileName);
// https://npm.taobao.org/mirrors/node/latest/docs/api/path.html#path_path_normalize_path
if (process.platform === 'win32') fileName = fileName.replace(/\\+/g, '/');
// replace '../' on ../../foo/bar
fileName = fileName.replace(/(\.\.\/)+/, '');
if (type === 'directory' && fileName && fileName[fileName.length - 1] !== '/') {
fileName += '/';
}
s = fileName.split('/');
}
strip = Math.min(strip, s.length - 1);
return s.slice(strip).join('/') || '/';
};
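To make the strip semantics above concrete, a short sketch (the require path and file names are illustrative):

```js
const utils = require('./lib/utils');

utils.stripFileName(1, 'node/package.json', 'file'); // => 'package.json'
utils.stripFileName(2, 'node/lib/index.js', 'file'); // => 'index.js'
utils.stripFileName(0, '/foo/bar', 'file');          // leading '/' stripped => 'foo/bar'
utils.stripFileName(1, 'a/../b/c', 'file');          // '..' normalized away first => 'c'
```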
'use strict';
const path = require('path');
const yazl = require('yazl');
const assert = require('assert');
const stream = require('stream');
const utils = require('../utils');
const ready = require('get-ready');
class ZipFileStream extends stream.Transform {
constructor(opts) {
super(opts);
const sourceType = utils.sourceType(opts.source);
const zipfile = new yazl.ZipFile();
const zipStream = zipfile.outputStream;
zipStream.on('data', data => this.push(data));
zipStream.on('end', () => this.ready(true));
zipfile.on('error', err => this.emit('error', err));
if (sourceType !== 'file') {
assert(opts.relativePath, 'opts.relativePath is required when compressing a buffer, or a stream');
}
if (sourceType) {
this.end();
}
if (sourceType === 'file') {
zipfile.addFile(opts.source, opts.relativePath || path.basename(opts.source), opts.yazl);
} else if (sourceType === 'buffer') {
zipfile.addBuffer(opts.source, opts.relativePath, opts.yazl);
} else if (sourceType === 'stream') {
zipfile.addReadStream(opts.source, opts.relativePath, opts.yazl);
} else { // undefined
const passThrough = this._passThrough = new stream.PassThrough();
this.on('finish', () => passThrough.end());
zipfile.addReadStream(passThrough, opts.relativePath, opts.yazl);
}
zipfile.end(opts.yazl);
}
_transform(chunk, encoding, callback) {
if (this._passThrough) {
this._passThrough.write(chunk, encoding, callback);
}
}
_flush(callback) {
this.ready(callback);
}
}
ready.mixin(ZipFileStream.prototype);
module.exports = ZipFileStream;
'use strict';
const utils = require('../utils');
const ZipStream = require('./stream');
const ZipFileStream = require('./file_stream');
const ZipUncompressStream = require('./uncompress_stream');
exports.Stream = ZipStream;
exports.FileStream = ZipFileStream;
exports.UncompressStream = ZipUncompressStream;
exports.compressDir = utils.makeCompressDirFn(ZipStream);
exports.compressFile = utils.makeFileProcessFn(ZipFileStream);
exports.uncompress = utils.makeUncompressFn(ZipUncompressStream);
'use strict';
const path = require('path');
const yazl = require('yazl');
const TarStream = require('../tar/stream');
class ZipStream extends TarStream {
_init() {
const zipfile = this._zipfile = new yazl.ZipFile();
const stream = zipfile.outputStream;
stream.on('end', () => this.push(null));
stream.on('data', chunk => this.push(chunk));
stream.on('error', err => this.emit('error', err));
}
_addFileEntry(entry, opts) {
this._zipfile.addFile(entry, opts.relativePath || path.basename(entry), opts);
this._onEntryFinish();
}
_addBufferEntry(entry, opts) {
if (!opts.relativePath) return this.emit('error', new Error('opts.relativePath is required if entry is a buffer'));
this._zipfile.addBuffer(entry, opts.relativePath, opts);
this._onEntryFinish();
}
_addStreamEntry(entry, opts) {
if (!opts.relativePath) return this.emit('error', new Error('opts.relativePath is required if entry is a stream'));
entry.on('error', err => this.emit('error', err));
this._zipfile.addReadStream(entry, opts.relativePath, opts);
this._onEntryFinish();
}
_finalize() {
this._zipfile.end();
}
}
module.exports = ZipStream;
'use strict';
// https://github.com/thejoshwolfe/yauzl#no-streaming-unzip-api
const yauzl = require('yauzl');
const stream = require('stream');
const UncompressBaseStream = require('../base_write_stream');
const utils = require('../utils');
// lazy load iconv-lite
let iconv;
const YAUZL_CALLBACK = Symbol('ZipUncompressStream#yauzlCallback');
const STRIP_NAME = Symbol('ZipUncompressStream#stripName');
// don't decodeStrings on yauzl, we should handle fileName by ourself
// see validateFileName on https://github.com/thejoshwolfe/yauzl/blob/51010ce4e8c7e6345efe195e1b4150518f37b393/index.js#L607
// - support "absolute path"
const DEFAULTS = { lazyEntries: true, decodeStrings: false };
class ZipUncompressStream extends UncompressBaseStream {
constructor(opts) {
opts = opts || {};
super(opts);
this._chunks = [];
this._strip = Number(opts.strip) || 0;
this._zipFileNameEncoding = opts.zipFileNameEncoding || 'utf8';
if (this._zipFileNameEncoding === 'utf-8') {
this._zipFileNameEncoding = 'utf8';
}
this[YAUZL_CALLBACK] = this[YAUZL_CALLBACK].bind(this);
const sourceType = utils.sourceType(opts.source);
const yauzlOpts = this._yauzlOpts = Object.assign({}, DEFAULTS, opts.yauzl);
if (sourceType === 'file') {
yauzl.open(opts.source, yauzlOpts, this[YAUZL_CALLBACK]);
return;
}
if (sourceType === 'buffer') {
yauzl.fromBuffer(opts.source, yauzlOpts, this[YAUZL_CALLBACK]);
return;
}
if (sourceType === 'stream') {
utils.streamToBuffer(opts.source)
.then(buf => yauzl.fromBuffer(buf, yauzlOpts, this[YAUZL_CALLBACK]))
.catch(e => this.emit('error', e));
return;
}
this.on('pipe', srcStream => {
srcStream.unpipe(this); // stop piping; buffer the whole source below instead
utils.streamToBuffer(srcStream)
.then(buf => {
this._chunks.push(buf);
buf = Buffer.concat(this._chunks);
yauzl.fromBuffer(buf, yauzlOpts, this[YAUZL_CALLBACK]);
})
.catch(e => this.emit('error', e));
});
}
_write(chunk, encoding, callback) {
// push to the _chunks array; this should only happen once, because the source stream is unpiped on 'pipe'.
this._chunks.push(chunk);
callback();
}
[YAUZL_CALLBACK](err, zipFile) {
if (err) return this.emit('error', err);
zipFile.readEntry();
zipFile
.on('entry', entry => {
// fileName is buffer by default because decodeStrings = false
if (Buffer.isBuffer(entry.fileName)) {
if (this._zipFileNameEncoding === 'utf8') {
entry.fileName = entry.fileName.toString();
} else {
if (!iconv) {
iconv = require('iconv-lite');
}
entry.fileName = iconv.decode(entry.fileName, this._zipFileNameEncoding);
}
}
// directory file names end with '/'
const type = /\/$/.test(entry.fileName) ? 'directory' : 'file';
const name = entry.fileName = this[STRIP_NAME](entry.fileName, type);
const header = { name, type, yauzl: entry };
if (type === 'file') {
zipFile.openReadStream(entry, (err, readStream) => {
if (err) return this.emit('error', err);
this.emit('entry', header, readStream, next);
});
} else { // directory
const placeholder = new stream.Readable({ read() {} });
this.emit('entry', header, placeholder, next);
setImmediate(() => placeholder.emit('end'));
}
})
.on('end', () => this.emit('finish'))
.on('error', err => this.emit('error', err));
function next() {
zipFile.readEntry();
}
}
[STRIP_NAME](fileName, type) {
return utils.stripFileName(this._strip, fileName, type);
}
}
module.exports = ZipUncompressStream;
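A consumption sketch for the stream above (destination paths hypothetical): every 'entry' event hands over a header, a readable stream, and the `next` callback that advances `zipFile.readEntry()`:

```js
const fs = require('fs');
const path = require('path');
const mkdirp = require('mkdirp');
const compressing = require('compressing');

const destDir = '/path/to/dest';
new compressing.zip.UncompressStream({ source: '/path/to/archive.zip' })
  .on('error', console.error)
  .on('finish', () => console.log('all entries handled'))
  .on('entry', (header, stream, next) => {
    stream.on('end', next); // advance to the following entry once this one is drained
    if (header.type === 'file') {
      stream.pipe(fs.createWriteStream(path.join(destDir, header.name)));
    } else { // header.type === 'directory'
      mkdirp(path.join(destDir, header.name), err => {
        if (err) return console.error(err);
        stream.resume(); // the directory placeholder stream only emits 'end'
      });
    }
  });
```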
../../../mkdirp/bin/cmd.js
\ No newline at end of file
{
"name": "compressing",
"version": "1.5.1",
"description": "Everything you need for compressing and uncompressing",
"main": "index.js",
"scripts": {
"ts-test": "tsc -p ./test/fixtures/types/tsconfig.json",
"test": "egg-bin test && npm run ts-test",
"cov": "egg-bin cov",
"lint": "eslint .",
"ci": "npm run lint && npm run ts-test && npm run cov"
},
"repository": {
"type": "git",
"url": "git+https://github.com/node-modules/compressing.git"
},
"keywords": [
"compress",
"uncompress",
"util",
"tgz",
"gzip",
"tar",
"zip",
"stream"
],
"typings": "index.d.ts",
"files": [
"index.js",
"index.d.ts",
"lib"
],
"author": "shaoshuai0102 <shaoshuai0102@gmail.com>",
"license": "MIT",
"bugs": {
"url": "https://github.com/node-modules/compressing/issues"
},
"homepage": "https://github.com/node-modules/compressing#readme",
"dependencies": {
"flushwritable": "^1.0.0",
"get-ready": "^1.0.0",
"iconv-lite": "^0.5.0",
"mkdirp": "^0.5.1",
"pump": "^3.0.0",
"streamifier": "^0.1.1",
"tar-stream": "^1.5.2",
"yauzl": "^2.7.0",
"yazl": "^2.4.2"
},
"devDependencies": {
"@types/mocha": "^5.2.5",
"@types/node": "^10.12.9",
"dir-compare": "^1.3.0",
"egg-bin": "^1.9.1",
"egg-ci": "^1.8.0",
"eslint": "^3.10.2",
"eslint-config-egg": "^3.2.0",
"mm": "^2.0.0",
"mz-modules": "^2.1.0",
"power-assert": "^1.4.2",
"rimraf": "^2.6.2",
"typescript": "^3.1.6",
"uuid": "^3.0.1"
},
"engines": {
"node": ">= 4.0.0"
},
"ci": {
"version": "4, 6, 8, 10, 12",
"license": {
"year": "2017",
"fullname": "node-modules and other contributors"
}
}
}
Copyright Node.js contributors. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
# core-util-is
The `util.is*` functions introduced in Node v0.12.
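A quick sketch of the exported checks:

```js
var coreUtil = require('core-util-is');

coreUtil.isArray([1, 2, 3]);           // true
coreUtil.isNullOrUndefined(undefined); // true
coreUtil.isPrimitive({});              // false
coreUtil.isBuffer(Buffer.from('hi'));  // true
```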
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// NOTE: These type checking functions intentionally don't use `instanceof`
// because it is fragile and can be easily faked with `Object.create()`.
function isArray(arg) {
if (Array.isArray) {
return Array.isArray(arg);
}
return objectToString(arg) === '[object Array]';
}
exports.isArray = isArray;
function isBoolean(arg) {
return typeof arg === 'boolean';
}
exports.isBoolean = isBoolean;
function isNull(arg) {
return arg === null;
}
exports.isNull = isNull;
function isNullOrUndefined(arg) {
return arg == null;
}
exports.isNullOrUndefined = isNullOrUndefined;
function isNumber(arg) {
return typeof arg === 'number';
}
exports.isNumber = isNumber;
function isString(arg) {
return typeof arg === 'string';
}
exports.isString = isString;
function isSymbol(arg) {
return typeof arg === 'symbol';
}
exports.isSymbol = isSymbol;
function isUndefined(arg) {
return arg === void 0;
}
exports.isUndefined = isUndefined;
function isRegExp(re) {
return objectToString(re) === '[object RegExp]';
}
exports.isRegExp = isRegExp;
function isObject(arg) {
return typeof arg === 'object' && arg !== null;
}
exports.isObject = isObject;
function isDate(d) {
return objectToString(d) === '[object Date]';
}
exports.isDate = isDate;
function isError(e) {
return (objectToString(e) === '[object Error]' || e instanceof Error);
}
exports.isError = isError;
function isFunction(arg) {
return typeof arg === 'function';
}
exports.isFunction = isFunction;
function isPrimitive(arg) {
return arg === null ||
typeof arg === 'boolean' ||
typeof arg === 'number' ||
typeof arg === 'string' ||
typeof arg === 'symbol' || // ES6 symbol
typeof arg === 'undefined';
}
exports.isPrimitive = isPrimitive;
exports.isBuffer = require('buffer').Buffer.isBuffer;
function objectToString(o) {
return Object.prototype.toString.call(o);
}
{
"name": "core-util-is",
"version": "1.0.3",
"description": "The `util.is*` functions introduced in Node v0.12.",
"main": "lib/util.js",
"files": [
"lib"
],
"repository": {
"type": "git",
"url": "git://github.com/isaacs/core-util-is"
},
"keywords": [
"util",
"isBuffer",
"isArray",
"isNumber",
"isString",
"isRegExp",
"isThis",
"isThat",
"polyfill"
],
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
"license": "MIT",
"bugs": {
"url": "https://github.com/isaacs/core-util-is/issues"
},
"scripts": {
"test": "tap test.js",
"preversion": "npm test",
"postversion": "npm publish",
"prepublishOnly": "git push origin --follow-tags"
},
"devDependencies": {
"tap": "^15.0.9"
}
}
The MIT License (MIT)
Copyright (c) 2014 Mathias Buus
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
\ No newline at end of file
# end-of-stream
A node module that calls a callback when a readable/writable/duplex stream has completed or failed.
npm install end-of-stream
[![Build status](https://travis-ci.org/mafintosh/end-of-stream.svg?branch=master)](https://travis-ci.org/mafintosh/end-of-stream)
## Usage
Simply pass a stream and a callback to `eos`.
Legacy streams, streams2, and streams3 are all supported.
``` js
var eos = require('end-of-stream');
eos(readableStream, function(err) {
// this will be set to the stream instance
if (err) return console.log('stream had an error or closed early');
console.log('stream has ended', this === readableStream);
});
eos(writableStream, function(err) {
if (err) return console.log('stream had an error or closed early');
console.log('stream has finished', this === writableStream);
});
eos(duplexStream, function(err) {
if (err) return console.log('stream had an error or closed early');
console.log('stream has ended and finished', this === duplexStream);
});
eos(duplexStream, {readable:false}, function(err) {
if (err) return console.log('stream had an error or closed early');
console.log('stream has finished but might still be readable');
});
eos(duplexStream, {writable:false}, function(err) {
if (err) return console.log('stream had an error or closed early');
console.log('stream has ended but might still be writable');
});
eos(readableStream, {error:false}, function(err) {
// do not treat emit('error', err) as an end-of-stream
});
```
## License
MIT
## Related
`end-of-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one.
var once = require('once');
var noop = function() {};
var isRequest = function(stream) {
return stream.setHeader && typeof stream.abort === 'function';
};
var isChildProcess = function(stream) {
return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3
};
var eos = function(stream, opts, callback) {
if (typeof opts === 'function') return eos(stream, null, opts);
if (!opts) opts = {};
callback = once(callback || noop);
var ws = stream._writableState;
var rs = stream._readableState;
var readable = opts.readable || (opts.readable !== false && stream.readable);
var writable = opts.writable || (opts.writable !== false && stream.writable);
var cancelled = false;
var onlegacyfinish = function() {
if (!stream.writable) onfinish();
};
var onfinish = function() {
writable = false;
if (!readable) callback.call(stream);
};
var onend = function() {
readable = false;
if (!writable) callback.call(stream);
};
var onexit = function(exitCode) {
callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null);
};
var onerror = function(err) {
callback.call(stream, err);
};
var onclose = function() {
process.nextTick(onclosenexttick);
};
var onclosenexttick = function() {
if (cancelled) return;
if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close'));
if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close'));
};
var onrequest = function() {
stream.req.on('finish', onfinish);
};
if (isRequest(stream)) {
stream.on('complete', onfinish);
stream.on('abort', onclose);
if (stream.req) onrequest();
else stream.on('request', onrequest);
} else if (writable && !ws) { // legacy streams
stream.on('end', onlegacyfinish);
stream.on('close', onlegacyfinish);
}
if (isChildProcess(stream)) stream.on('exit', onexit);
stream.on('end', onend);
stream.on('finish', onfinish);
if (opts.error !== false) stream.on('error', onerror);
stream.on('close', onclose);
return function() {
cancelled = true;
stream.removeListener('complete', onfinish);
stream.removeListener('abort', onclose);
stream.removeListener('request', onrequest);
if (stream.req) stream.req.removeListener('finish', onfinish);
stream.removeListener('end', onlegacyfinish);
stream.removeListener('close', onlegacyfinish);
stream.removeListener('finish', onfinish);
stream.removeListener('exit', onexit);
stream.removeListener('end', onend);
stream.removeListener('error', onerror);
stream.removeListener('close', onclose);
};
};
module.exports = eos;
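One detail the README above leaves implicit: as the closing `return function() {...}` shows, `eos()` hands back a cleanup function that detaches every listener it attached, useful when the stream's outcome stops mattering. A sketch (file path hypothetical):

```js
var fs = require('fs');
var eos = require('end-of-stream');

var readable = fs.createReadStream('/path/to/file.txt');
var detach = eos(readable, function(err) {
  if (err) return console.log('stream had an error or closed early');
  console.log('stream has ended');
});

// later, if we no longer care how the stream finishes:
detach();
```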
{
"name": "end-of-stream",
"version": "1.4.4",
"description": "Call a callback when a readable/writable/duplex stream has completed or failed.",
"repository": {
"type": "git",
"url": "git://github.com/mafintosh/end-of-stream.git"
},
"dependencies": {
"once": "^1.4.0"
},
"scripts": {
"test": "node test.js"
},
"files": [
"index.js"
],
"keywords": [
"stream",
"streams",
"callback",
"finish",
"close",
"end",
"wait"
],
"bugs": {
"url": "https://github.com/mafintosh/end-of-stream/issues"
},
"homepage": "https://github.com/mafintosh/end-of-stream",
"main": "index.js",
"author": "Mathias Buus <mathiasbuus@gmail.com>",
"license": "MIT",
"devDependencies": {
"tape": "^4.11.0"
}
}
language: node_js
node_js:
- "0.10"
script:
- "npm run test-travis"
after_script:
- "npm install coveralls@2 && cat ./coverage/lcov.info | ./node_modules/.bin/coveralls"
### 1.0.1
* use `setImmediate` instead of `nextTick`
### 1.0.0
* `new FdSlicer(fd, options)` must now be `fdSlicer.createFromFd(fd, options)`
* fix behavior when `end` is 0.
* fix `createWriteStream` when using `createFromBuffer`
### 0.4.0
* add ability to create an FdSlicer instance from a Buffer
### 0.3.2
* fix write stream and read stream destroy behavior
### 0.3.1
* write stream: fix end option behavior
### 0.3.0
* write stream emits 'progress' events
* write stream supports 'end' option which causes the stream to emit an error
if a maximum size is exceeded
* improve documentation
### 0.2.1
* Update pend dependency to latest bugfix version.
### 0.2.0
* Add read and write functions
### 0.1.0
* Add `autoClose` option and `ref()` and `unref()`.
### 0.0.2
* Add API documentation
* read stream: create buffer at last possible moment
### 0.0.1
* Initial release
Copyright (c) 2014 Andrew Kelley
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation files
(the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
# fd-slicer
[![Build Status](https://travis-ci.org/andrewrk/node-fd-slicer.svg?branch=master)](https://travis-ci.org/andrewrk/node-fd-slicer)
Safe `fs.ReadStream` and `fs.WriteStream` using the same fd.
Let's say that you want to perform a parallel upload of a file to a remote
server. To do this, you want to create multiple read streams. The first thing
you might think of is to use the `{start: 0, end: 0}` API of
`fs.createReadStream`. This gives you two choices:
0. Use the same file descriptor for all `fs.ReadStream` objects.
0. Open the file multiple times, resulting in a separate file descriptor
for each read stream.
Neither of these is an acceptable option. The first one is a severe bug,
because the API docs for `fs.write` state:
> Note that it is unsafe to use `fs.write` multiple times on the same file
> without waiting for the callback. For this scenario, `fs.createWriteStream`
> is strongly recommended.
`fs.createWriteStream` will solve the problem if you only create one of them
for the file descriptor, but it will exhibit this unsafety if you create
multiple write streams per file descriptor.
The second option suffers from a race condition. For each additional time the
file is opened after the first, it is possible that the file is modified. So
in our parallel uploading example, we might upload a corrupt file that never
existed on the client's computer.
This module solves this problem by providing `createReadStream` and
`createWriteStream` that operate on a shared file descriptor and provides
the convenient stream API while still allowing slicing and dicing.
This module also gives you some additional power that the builtin
`fs.createWriteStream` does not. These features are:
* Emitting a 'progress' event on write.
* Ability to set a maximum size and emit an error if this size is exceeded.
* Ability to create an `FdSlicer` instance from a `Buffer`. This enables you
  to handle files and buffers through the same API.
## Usage
```js
var fdSlicer = require('fd-slicer');
var fs = require('fs');
fs.open("file.txt", 'r', function(err, fd) {
if (err) throw err;
var slicer = fdSlicer.createFromFd(fd);
var firstPart = slicer.createReadStream({start: 0, end: 100});
var secondPart = slicer.createReadStream({start: 100});
var firstOut = fs.createWriteStream("first.txt");
var secondOut = fs.createWriteStream("second.txt");
firstPart.pipe(firstOut);
secondPart.pipe(secondOut);
});
```
You can also create from a buffer:
```js
var fdSlicer = require('fd-slicer');
var slicer = fdSlicer.createFromBuffer(someBuffer);
var firstPart = slicer.createReadStream({start: 0, end: 100});
var secondPart = slicer.createReadStream({start: 100});
var firstOut = fs.createWriteStream("first.txt");
var secondOut = fs.createWriteStream("second.txt");
firstPart.pipe(firstOut);
secondPart.pipe(secondOut);
```
## API Documentation
### fdSlicer.createFromFd(fd, [options])
```js
var fdSlicer = require('fd-slicer');
fs.open("file.txt", 'r', function(err, fd) {
if (err) throw err;
var slicer = fdSlicer.createFromFd(fd);
// ...
});
```
Make sure `fd` is a properly initialized file descriptor. If you want to
use `createReadStream` make sure you open it for reading and if you want
to use `createWriteStream` make sure you open it for writing.
`options` is an optional object which can contain:
* `autoClose` - if set to `true`, the file descriptor will be automatically
closed once the last stream that references it is closed. Defaults to
`false`. `ref()` and `unref()` can be used to increase or decrease the
reference count, respectively.
### fdSlicer.createFromBuffer(buffer, [options])
```js
var fdSlicer = require('fd-slicer');
var slicer = fdSlicer.createFromBuffer(someBuffer);
// ...
```
`options` is an optional object which can contain:
* `maxChunkSize` - A `Number` of bytes. see `createReadStream()`.
If falsey, defaults to unlimited.
#### Properties
##### fd
The file descriptor passed in. `undefined` if created from a buffer.
#### Methods
##### createReadStream(options)
Available `options`:
* `start` - Number. The offset into the file to start reading from. Defaults
to 0.
* `end` - Number. Exclusive upper bound offset into the file to stop reading
from.
* `highWaterMark` - Number. The maximum number of bytes to store in the
internal buffer before ceasing to read from the underlying resource.
Defaults to 16 KB.
* `encoding` - String. If specified, then buffers will be decoded to strings
using the specified encoding. Defaults to `null`.
The ReadableStream that this returns has these additional methods:
* `destroy(err)` - stop streaming. `err` is optional and is the error that
will be emitted in order to cause the streaming to stop. Defaults to
`new Error("stream destroyed")`.
If `maxChunkSize` was specified (see `createFromBuffer()`), the read stream
will provide chunks of at most that size. Normally, the read stream provides
the entire range requested in a single chunk, but this can cause performance
problems in some circumstances.
See [thejoshwolfe/yauzl#87](https://github.com/thejoshwolfe/yauzl/issues/87).
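A sketch of the `maxChunkSize` behaviour described above:

```js
var fdSlicer = require('fd-slicer');

var slicer = fdSlicer.createFromBuffer(Buffer.from('abcdefghij'), { maxChunkSize: 4 });
slicer.createReadStream().on('data', function(chunk) {
  console.log(chunk.length); // 4, 4, then 2 -- never more than maxChunkSize
});
```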
##### createWriteStream(options)
Available `options`:
* `start` - Number. The offset into the file to start writing to. Defaults to
0.
* `end` - Number. Exclusive upper bound offset into the file. If this offset
is reached, the write stream will emit an 'error' event and stop functioning.
In this situation, `err.code === 'ETOOBIG'`. Defaults to `Infinity`.
* `highWaterMark` - Number. Buffer level when `write()` starts returning
false. Defaults to 16 KB.
* `decodeStrings` - Boolean. Whether or not to decode strings into Buffers
before passing them to `_write()`. Defaults to `true`.
The WritableStream that this returns has these additional methods:
* `destroy()` - stop streaming
And these additional properties:
* `bytesWritten` - number of bytes written to the stream
And these additional events:
* 'progress' - emitted when `bytesWritten` changes.
##### read(buffer, offset, length, position, callback)
Equivalent to `fs.read`, but with concurrency protection.
`callback` must be defined.
##### write(buffer, offset, length, position, callback)
Equivalent to `fs.write`, but with concurrency protection.
`callback` must be defined.
##### ref()
Increase the `autoClose` reference count by 1.
##### unref()
Decrease the `autoClose` reference count by 1.
#### Events
##### 'error'
Emitted if `fs.close` returns an error when auto closing.
##### 'close'
Emitted when fd-slicer closes the file descriptor due to `autoClose`. Never
emitted if created from a buffer.
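Tying the options and events together, a sketch of `autoClose` reference counting (file name hypothetical):

```js
var fs = require('fs');
var fdSlicer = require('fd-slicer');

fs.open('file.txt', 'r', function(err, fd) {
  if (err) throw err;
  var slicer = fdSlicer.createFromFd(fd, { autoClose: true });
  slicer.on('close', function() {
    console.log('fd closed after the last stream finished');
  });
  // each stream refs the slicer on creation and unrefs when done
  slicer.createReadStream({ start: 0, end: 100 }).pipe(process.stdout);
});
```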
var fs = require('fs');
var util = require('util');
var stream = require('stream');
var Readable = stream.Readable;
var Writable = stream.Writable;
var PassThrough = stream.PassThrough;
var Pend = require('pend');
var EventEmitter = require('events').EventEmitter;
exports.createFromBuffer = createFromBuffer;
exports.createFromFd = createFromFd;
exports.BufferSlicer = BufferSlicer;
exports.FdSlicer = FdSlicer;
util.inherits(FdSlicer, EventEmitter);
function FdSlicer(fd, options) {
options = options || {};
EventEmitter.call(this);
this.fd = fd;
this.pend = new Pend();
this.pend.max = 1;
this.refCount = 0;
this.autoClose = !!options.autoClose;
}
FdSlicer.prototype.read = function(buffer, offset, length, position, callback) {
var self = this;
self.pend.go(function(cb) {
fs.read(self.fd, buffer, offset, length, position, function(err, bytesRead, buffer) {
cb();
callback(err, bytesRead, buffer);
});
});
};
FdSlicer.prototype.write = function(buffer, offset, length, position, callback) {
var self = this;
self.pend.go(function(cb) {
fs.write(self.fd, buffer, offset, length, position, function(err, written, buffer) {
cb();
callback(err, written, buffer);
});
});
};
FdSlicer.prototype.createReadStream = function(options) {
return new ReadStream(this, options);
};
FdSlicer.prototype.createWriteStream = function(options) {
return new WriteStream(this, options);
};
FdSlicer.prototype.ref = function() {
this.refCount += 1;
};
FdSlicer.prototype.unref = function() {
var self = this;
self.refCount -= 1;
if (self.refCount > 0) return;
if (self.refCount < 0) throw new Error("invalid unref");
if (self.autoClose) {
fs.close(self.fd, onCloseDone);
}
function onCloseDone(err) {
if (err) {
self.emit('error', err);
} else {
self.emit('close');
}
}
};
util.inherits(ReadStream, Readable);
function ReadStream(context, options) {
options = options || {};
Readable.call(this, options);
this.context = context;
this.context.ref();
this.start = options.start || 0;
this.endOffset = options.end;
this.pos = this.start;
this.destroyed = false;
}
ReadStream.prototype._read = function(n) {
var self = this;
if (self.destroyed) return;
var toRead = Math.min(self._readableState.highWaterMark, n);
if (self.endOffset != null) {
toRead = Math.min(toRead, self.endOffset - self.pos);
}
if (toRead <= 0) {
self.destroyed = true;
self.push(null);
self.context.unref();
return;
}
self.context.pend.go(function(cb) {
if (self.destroyed) return cb();
var buffer = new Buffer(toRead);
fs.read(self.context.fd, buffer, 0, toRead, self.pos, function(err, bytesRead) {
if (err) {
self.destroy(err);
} else if (bytesRead === 0) {
self.destroyed = true;
self.push(null);
self.context.unref();
} else {
self.pos += bytesRead;
self.push(buffer.slice(0, bytesRead));
}
cb();
});
});
};
ReadStream.prototype.destroy = function(err) {
if (this.destroyed) return;
err = err || new Error("stream destroyed");
this.destroyed = true;
this.emit('error', err);
this.context.unref();
};
util.inherits(WriteStream, Writable);
function WriteStream(context, options) {
options = options || {};
Writable.call(this, options);
this.context = context;
this.context.ref();
this.start = options.start || 0;
this.endOffset = (options.end == null) ? Infinity : +options.end;
this.bytesWritten = 0;
this.pos = this.start;
this.destroyed = false;
this.on('finish', this.destroy.bind(this));
}
WriteStream.prototype._write = function(buffer, encoding, callback) {
var self = this;
if (self.destroyed) return;
if (self.pos + buffer.length > self.endOffset) {
var err = new Error("maximum file length exceeded");
err.code = 'ETOOBIG';
self.destroy();
callback(err);
return;
}
self.context.pend.go(function(cb) {
if (self.destroyed) return cb();
fs.write(self.context.fd, buffer, 0, buffer.length, self.pos, function(err, bytes) {
if (err) {
self.destroy();
cb();
callback(err);
} else {
self.bytesWritten += bytes;
self.pos += bytes;
self.emit('progress');
cb();
callback();
}
});
});
};
WriteStream.prototype.destroy = function() {
if (this.destroyed) return;
this.destroyed = true;
this.context.unref();
};
util.inherits(BufferSlicer, EventEmitter);
function BufferSlicer(buffer, options) {
EventEmitter.call(this);
options = options || {};
this.refCount = 0;
this.buffer = buffer;
this.maxChunkSize = options.maxChunkSize || Number.MAX_SAFE_INTEGER;
}
BufferSlicer.prototype.read = function(buffer, offset, length, position, callback) {
var end = position + length;
var delta = end - this.buffer.length;
var written = (delta > 0) ? delta : length;
this.buffer.copy(buffer, offset, position, end);
setImmediate(function() {
callback(null, written);
});
};
BufferSlicer.prototype.write = function(buffer, offset, length, position, callback) {
buffer.copy(this.buffer, position, offset, offset + length);
setImmediate(function() {
callback(null, length, buffer);
});
};
BufferSlicer.prototype.createReadStream = function(options) {
options = options || {};
var readStream = new PassThrough(options);
readStream.destroyed = false;
readStream.start = options.start || 0;
readStream.endOffset = options.end;
// by the time this function returns, we'll be done.
readStream.pos = readStream.endOffset || this.buffer.length;
// respect the maxChunkSize option to slice up the chunk into smaller pieces.
var entireSlice = this.buffer.slice(readStream.start, readStream.pos);
var offset = 0;
while (true) {
var nextOffset = offset + this.maxChunkSize;
if (nextOffset >= entireSlice.length) {
// last chunk
if (offset < entireSlice.length) {
readStream.write(entireSlice.slice(offset, entireSlice.length));
}
break;
}
readStream.write(entireSlice.slice(offset, nextOffset));
offset = nextOffset;
}
readStream.end();
readStream.destroy = function() {
readStream.destroyed = true;
};
return readStream;
};
BufferSlicer.prototype.createWriteStream = function(options) {
var bufferSlicer = this;
options = options || {};
var writeStream = new Writable(options);
writeStream.start = options.start || 0;
writeStream.endOffset = (options.end == null) ? this.buffer.length : +options.end;
writeStream.bytesWritten = 0;
writeStream.pos = writeStream.start;
writeStream.destroyed = false;
writeStream._write = function(buffer, encoding, callback) {
if (writeStream.destroyed) return;
var end = writeStream.pos + buffer.length;
if (end > writeStream.endOffset) {
var err = new Error("maximum file length exceeded");
err.code = 'ETOOBIG';
writeStream.destroyed = true;
callback(err);
return;
}
buffer.copy(bufferSlicer.buffer, writeStream.pos, 0, buffer.length);
writeStream.bytesWritten += buffer.length;
writeStream.pos = end;
writeStream.emit('progress');
callback();
};
writeStream.destroy = function() {
writeStream.destroyed = true;
};
return writeStream;
};
BufferSlicer.prototype.ref = function() {
this.refCount += 1;
};
BufferSlicer.prototype.unref = function() {
this.refCount -= 1;
if (this.refCount < 0) {
throw new Error("invalid unref");
}
};
function createFromBuffer(buffer, options) {
return new BufferSlicer(buffer, options);
}
function createFromFd(fd, options) {
return new FdSlicer(fd, options);
}
{
"name": "fd-slicer",
"version": "1.1.0",
"description": "safely create multiple ReadStream or WriteStream objects from the same file descriptor",
"main": "index.js",
"scripts": {
"test": "mocha --reporter spec --check-leaks",
"test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --reporter dot --check-leaks test/test.js",
"test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --timeout 10000 --reporter spec --check-leaks test/test.js"
},
"author": "Andrew Kelley <superjoe30@gmail.com>",
"license": "MIT",
"devDependencies": {
"istanbul": "~0.3.3",
"mocha": "~2.0.1",
"stream-equal": "~0.1.5",
"streamsink": "~1.2.0"
},
"dependencies": {
"pend": "~1.2.0"
},
"directories": {
"test": "test"
},
"repository": {
"type": "git",
"url": "git://github.com/andrewrk/node-fd-slicer.git"
},
"bugs": {
"url": "https://github.com/andrewrk/node-fd-slicer/issues"
},
"keywords": [
"createReadStream",
"createWriteStream"
]
}
language: node_js
node_js:
- "0.10"
# FlushWritable
A Writable stream that flushes before emitting finish.
## ChangeLog
### v1.0.0
- **Initial Public Release**
The MIT License (MIT)
Copyright (c) 2014 Tom Frost
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
# FlushWritable [![Build Status](https://travis-ci.org/TomFrost/FlushWritable.svg?branch=master)](https://travis-ci.org/TomFrost/FlushWritable)
A Writable stream that flushes before emitting finish.
Sponsored by [Leadnomics](http://www.leadnomics.com).
## What it is
Node.js's Streams API is a fantastic tool, but has a nagging shortcoming:
while the Transform stream implements a `_flush` method that is called before
its final events are fired, the Writable stream does not. So if you're
buffering rows to be INSERTed into a SQL table rather than slowly writing one
at a time, or you're buffering bytes for a transfer to S3, there is no way of
flushing those buffers to the target data store before the `finish` event is
emitted.
**FlushWritable is a drop-in replacement for stream.Writable** that implements
a `_flush` call that behaves exactly the way Transform._flush does. It's called
with a callback, waits for the callback to be called, and _then_ fires
`finish` (or `error` if an error was passed). No additional execution after
the `finish` event, no implementing nonstandard event types, no chaining a
shell Transform stream before the Writable to hijack its `_flush` call. And
it's fully futureproof against the Node.js team actually adding a `_flush`
method to the native stream.Writable in a later version of Node, so you don't
have to worry about your code breaking on upgrade.
## How does it work?
It's pretty simple. Writable is an EventEmitter. FlushWritable extends
Writable and overrides EventEmitter.emit in its own prototype, listening for a
request that `finish` be emitted. When that comes in, it blocks that event
from emitting, and calls `_flush` if it's defined.
The callback it passes to `_flush` will trigger `finish` to actually be
emitted. If that callback is called with a truthy first argument, `error` is
emitted instead. All other events pass right through and are emitted as
expected. If a future version of node adds a `Writable.prototype._flush`
method, the whole thing short-circuits and native functionality takes over.
## Installation
In your project folder, type:
npm install flushwritable --save
## Usage
Just extend FlushWritable instead of stream.Writable in your write stream, and
feel free to define a `_flush(cb)` function!
```javascript
var FlushWritable = require('flushwritable'),
util = require('util');
function MyWriteStream(opts) {
FlushWritable.call(this, opts);
this._buffer = [];
}
util.inherits(MyWriteStream, FlushWritable);
MyWriteStream.prototype._flush = function(cb) {
writeBufferSomewhere(this._buffer, cb);
};
MyWriteStream.prototype._write = function(data, encoding, cb) {
this._buffer.push(data);
cb();
};
```
## License
FlushWritable is distributed under the MIT license.
## Credits
FlushWritable was created by Tom Frost at Leadnomics in 2014.
/*
* FlushWritable
* Copyright 2014 Tom Frost
*/
var EventEmitter = require('events').EventEmitter,
Writable = require('stream').Writable,
util = require('util');
/**
* FlushWritable is a drop-in replacement for stream.Writable that implements
* the Transform stream's _flush() method. FlushWritable is meant to be
* extended, just like stream.Writable. However, in the child class's
* prototype, a method called _flush(cb) can be defined that will halt the
* firing of the 'finish' event until the callback is called. If the callback
* is called with a truthy first argument, 'error' is emitted instead.
* @param {Object} [opts] Options to configure this Writable stream. See the
* Node.js docs for stream.Writable.
* @constructor
*/
function FlushWritable(opts) {
Writable.call(this, opts);
}
util.inherits(FlushWritable, Writable);
FlushWritable.prototype.emit = function(evt) {
if (evt === 'finish' && this._flush && !Writable.prototype._flush) {
this._flush(function(err) {
if (err)
EventEmitter.prototype.emit.call(this, 'error', err);
else
EventEmitter.prototype.emit.call(this, 'finish');
}.bind(this));
}
else {
var args = Array.prototype.slice.call(arguments);
EventEmitter.prototype.emit.apply(this, args);
}
};
module.exports = FlushWritable;
{
"name": "flushwritable",
"version": "1.0.0",
"description": "A Writable stream that flushes before emitting finish",
"main": "lib/FlushWritable.js",
"directories": {
"test": "test"
},
"scripts": {
"test": "mocha -R spec test"
},
"repository": {
"type": "git",
"url": "git://github.com/TomFrost/FlushWritable"
},
"keywords": [
"stream",
"streams",
"writable",
"flush",
"transform",
"wrapper"
],
"author": "Tom Frost <tom@frosteddesign.com>",
"license": "MIT",
"bugs": {
"url": "https://github.com/TomFrost/FlushWritable/issues"
},
"homepage": "https://github.com/TomFrost/FlushWritable",
"devDependencies": {
"mocha": "^2.0.1",
"should": "^4.3.0"
}
}
The MIT License (MIT)
Copyright (c) 2018 Mathias Buus
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
module.exports = require('constants')
module.exports = require('fs').constants || require('constants')
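A sketch of why the one-line fallback above exists: the same require yields the flag constants on modern Node (from `fs.constants`) and on legacy versions (from the deprecated `constants` module):

```js
var fs = require('fs');
var constants = require('fs-constants');

// open read-only with a portable flag constant (path hypothetical)
fs.open('/path/to/file.txt', constants.O_RDONLY, function(err, fd) {
  if (err) throw err;
  fs.close(fd, function() {});
});
```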
{
"name": "fs-constants",
"version": "1.0.0",
"description": "Require constants across node and the browser",
"main": "index.js",
"browser": "browser.js",
"dependencies": {},
"devDependencies": {},
"repository": {
"type": "git",
"url": "https://github.com/mafintosh/fs-constants.git"
},
"author": "Mathias Buus (@mafintosh)",
"license": "MIT",
"bugs": {
"url": "https://github.com/mafintosh/fs-constants/issues"
},
"homepage": "https://github.com/mafintosh/fs-constants"
}
{"uChars":[128,165,169,178,184,216,226,235,238,244,248,251,253,258,276,284,300,325,329,334,364,463,465,467,469,471,473,475,477,506,594,610,712,716,730,930,938,962,970,1026,1104,1106,8209,8215,8218,8222,8231,8241,8244,8246,8252,8365,8452,8454,8458,8471,8482,8556,8570,8596,8602,8713,8720,8722,8726,8731,8737,8740,8742,8748,8751,8760,8766,8777,8781,8787,8802,8808,8816,8854,8858,8870,8896,8979,9322,9372,9548,9588,9616,9622,9634,9652,9662,9672,9676,9680,9702,9735,9738,9793,9795,11906,11909,11913,11917,11928,11944,11947,11951,11956,11960,11964,11979,12284,12292,12312,12319,12330,12351,12436,12447,12535,12543,12586,12842,12850,12964,13200,13215,13218,13253,13263,13267,13270,13384,13428,13727,13839,13851,14617,14703,14801,14816,14964,15183,15471,15585,16471,16736,17208,17325,17330,17374,17623,17997,18018,18212,18218,18301,18318,18760,18811,18814,18820,18823,18844,18848,18872,19576,19620,19738,19887,40870,59244,59336,59367,59413,59417,59423,59431,59437,59443,59452,59460,59478,59493,63789,63866,63894,63976,63986,64016,64018,64021,64025,64034,64037,64042,65074,65093,65107,65112,65127,65132,65375,65510,65536],"gbChars":[0,36,38,45,50,81,89,95,96,100,103,104,105,109,126,133,148,172,175,179,208,306,307,308,309,310,311,312,313,341,428,443,544,545,558,741,742,749,750,805,819,820,7922,7924,7925,7927,7934,7943,7944,7945,7950,8062,8148,8149,8152,8164,8174,8236,8240,8262,8264,8374,8380,8381,8384,8388,8390,8392,8393,8394,8396,8401,8406,8416,8419,8424,8437,8439,8445,8482,8485,8496,8521,8603,8936,8946,9046,9050,9063,9066,9076,9092,9100,9108,9111,9113,9131,9162,9164,9218,9219,11329,11331,11334,11336,11346,11361,11363,11366,11370,11372,11375,11389,11682,11686,11687,11692,11694,11714,11716,11723,11725,11730,11736,11982,11989,12102,12336,12348,12350,12384,12393,12395,12397,12510,12553,12851,12962,12973,13738,13823,13919,13933,14080,14298,14585,14698,15583,15847,16318,16434,16438,16481,16729,17102,17122,17315,17320,17402,17418,17859,17909,17911,17915,17916,17936,17939,17961,18664,18703,18814,18962,19043,33469,33470,33471,33484,33485,33490,33497,33501,33505,33513,33520,33536,33550,37845,37921,37948,38029,38038,38064,38065,38066,38069,38075,38076,38078,39108,39109,39113,39114,39115,39116,39265,39394,189000]}
\ No newline at end of file