7425 lines
218 KiB
JavaScript
7425 lines
218 KiB
JavaScript
(function (global, factory) {
|
|
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
|
|
typeof define === 'function' && define.amd ? define(['exports'], factory) :
|
|
(global = global || self, factory(global.json2csv = {}));
|
|
}(this, function (exports) { 'use strict';
|
|
|
|
var domain;
|
|
|
|
// This constructor is used to store event handlers. Instantiating this is
|
|
// faster than explicitly calling `Object.create(null)` to get a "clean" empty
|
|
// object (tested with v8 v4.9).
|
|
function EventHandlers() {}
|
|
EventHandlers.prototype = Object.create(null);
|
|
|
|
function EventEmitter() {
|
|
EventEmitter.init.call(this);
|
|
}
|
|
|
|
// nodejs oddity
|
|
// require('events') === require('events').EventEmitter
|
|
EventEmitter.EventEmitter = EventEmitter;
|
|
|
|
EventEmitter.usingDomains = false;
|
|
|
|
EventEmitter.prototype.domain = undefined;
|
|
EventEmitter.prototype._events = undefined;
|
|
EventEmitter.prototype._maxListeners = undefined;
|
|
|
|
// By default EventEmitters will print a warning if more than 10 listeners are
|
|
// added to it. This is a useful default which helps finding memory leaks.
|
|
EventEmitter.defaultMaxListeners = 10;
|
|
|
|
EventEmitter.init = function() {
|
|
this.domain = null;
|
|
if (EventEmitter.usingDomains) {
|
|
// if there is an active domain, then attach to it.
|
|
if (domain.active && !(this instanceof domain.Domain)) ;
|
|
}
|
|
|
|
if (!this._events || this._events === Object.getPrototypeOf(this)._events) {
|
|
this._events = new EventHandlers();
|
|
this._eventsCount = 0;
|
|
}
|
|
|
|
this._maxListeners = this._maxListeners || undefined;
|
|
};
|
|
|
|
// Obviously not all Emitters should be limited to 10. This function allows
|
|
// that to be increased. Set to zero for unlimited.
|
|
EventEmitter.prototype.setMaxListeners = function setMaxListeners(n) {
|
|
if (typeof n !== 'number' || n < 0 || isNaN(n))
|
|
throw new TypeError('"n" argument must be a positive number');
|
|
this._maxListeners = n;
|
|
return this;
|
|
};
|
|
|
|
function $getMaxListeners(that) {
|
|
if (that._maxListeners === undefined)
|
|
return EventEmitter.defaultMaxListeners;
|
|
return that._maxListeners;
|
|
}
|
|
|
|
EventEmitter.prototype.getMaxListeners = function getMaxListeners() {
|
|
return $getMaxListeners(this);
|
|
};
|
|
|
|
// These standalone emit* functions are used to optimize calling of event
|
|
// handlers for fast cases because emit() itself often has a variable number of
|
|
// arguments and can be deoptimized because of that. These functions always have
|
|
// the same number of arguments and thus do not get deoptimized, so the code
|
|
// inside them can execute faster.
|
|
function emitNone(handler, isFn, self) {
|
|
if (isFn)
|
|
handler.call(self);
|
|
else {
|
|
var len = handler.length;
|
|
var listeners = arrayClone(handler, len);
|
|
for (var i = 0; i < len; ++i)
|
|
listeners[i].call(self);
|
|
}
|
|
}
|
|
function emitOne(handler, isFn, self, arg1) {
|
|
if (isFn)
|
|
handler.call(self, arg1);
|
|
else {
|
|
var len = handler.length;
|
|
var listeners = arrayClone(handler, len);
|
|
for (var i = 0; i < len; ++i)
|
|
listeners[i].call(self, arg1);
|
|
}
|
|
}
|
|
function emitTwo(handler, isFn, self, arg1, arg2) {
|
|
if (isFn)
|
|
handler.call(self, arg1, arg2);
|
|
else {
|
|
var len = handler.length;
|
|
var listeners = arrayClone(handler, len);
|
|
for (var i = 0; i < len; ++i)
|
|
listeners[i].call(self, arg1, arg2);
|
|
}
|
|
}
|
|
function emitThree(handler, isFn, self, arg1, arg2, arg3) {
|
|
if (isFn)
|
|
handler.call(self, arg1, arg2, arg3);
|
|
else {
|
|
var len = handler.length;
|
|
var listeners = arrayClone(handler, len);
|
|
for (var i = 0; i < len; ++i)
|
|
listeners[i].call(self, arg1, arg2, arg3);
|
|
}
|
|
}
|
|
|
|
function emitMany(handler, isFn, self, args) {
|
|
if (isFn)
|
|
handler.apply(self, args);
|
|
else {
|
|
var len = handler.length;
|
|
var listeners = arrayClone(handler, len);
|
|
for (var i = 0; i < len; ++i)
|
|
listeners[i].apply(self, args);
|
|
}
|
|
}
|
|
|
|
EventEmitter.prototype.emit = function emit(type) {
|
|
var er, handler, len, args, i, events, domain;
|
|
var doError = (type === 'error');
|
|
|
|
events = this._events;
|
|
if (events)
|
|
doError = (doError && events.error == null);
|
|
else if (!doError)
|
|
return false;
|
|
|
|
domain = this.domain;
|
|
|
|
// If there is no 'error' event listener then throw.
|
|
if (doError) {
|
|
er = arguments[1];
|
|
if (domain) {
|
|
if (!er)
|
|
er = new Error('Uncaught, unspecified "error" event');
|
|
er.domainEmitter = this;
|
|
er.domain = domain;
|
|
er.domainThrown = false;
|
|
domain.emit('error', er);
|
|
} else if (er instanceof Error) {
|
|
throw er; // Unhandled 'error' event
|
|
} else {
|
|
// At least give some kind of context to the user
|
|
var err = new Error('Uncaught, unspecified "error" event. (' + er + ')');
|
|
err.context = er;
|
|
throw err;
|
|
}
|
|
return false;
|
|
}
|
|
|
|
handler = events[type];
|
|
|
|
if (!handler)
|
|
return false;
|
|
|
|
var isFn = typeof handler === 'function';
|
|
len = arguments.length;
|
|
switch (len) {
|
|
// fast cases
|
|
case 1:
|
|
emitNone(handler, isFn, this);
|
|
break;
|
|
case 2:
|
|
emitOne(handler, isFn, this, arguments[1]);
|
|
break;
|
|
case 3:
|
|
emitTwo(handler, isFn, this, arguments[1], arguments[2]);
|
|
break;
|
|
case 4:
|
|
emitThree(handler, isFn, this, arguments[1], arguments[2], arguments[3]);
|
|
break;
|
|
// slower
|
|
default:
|
|
args = new Array(len - 1);
|
|
for (i = 1; i < len; i++)
|
|
args[i - 1] = arguments[i];
|
|
emitMany(handler, isFn, this, args);
|
|
}
|
|
|
|
return true;
|
|
};
|
|
|
|
function _addListener(target, type, listener, prepend) {
|
|
var m;
|
|
var events;
|
|
var existing;
|
|
|
|
if (typeof listener !== 'function')
|
|
throw new TypeError('"listener" argument must be a function');
|
|
|
|
events = target._events;
|
|
if (!events) {
|
|
events = target._events = new EventHandlers();
|
|
target._eventsCount = 0;
|
|
} else {
|
|
// To avoid recursion in the case that type === "newListener"! Before
|
|
// adding it to the listeners, first emit "newListener".
|
|
if (events.newListener) {
|
|
target.emit('newListener', type,
|
|
listener.listener ? listener.listener : listener);
|
|
|
|
// Re-assign `events` because a newListener handler could have caused the
|
|
// this._events to be assigned to a new object
|
|
events = target._events;
|
|
}
|
|
existing = events[type];
|
|
}
|
|
|
|
if (!existing) {
|
|
// Optimize the case of one listener. Don't need the extra array object.
|
|
existing = events[type] = listener;
|
|
++target._eventsCount;
|
|
} else {
|
|
if (typeof existing === 'function') {
|
|
// Adding the second element, need to change to array.
|
|
existing = events[type] = prepend ? [listener, existing] :
|
|
[existing, listener];
|
|
} else {
|
|
// If we've already got an array, just append.
|
|
if (prepend) {
|
|
existing.unshift(listener);
|
|
} else {
|
|
existing.push(listener);
|
|
}
|
|
}
|
|
|
|
// Check for listener leak
|
|
if (!existing.warned) {
|
|
m = $getMaxListeners(target);
|
|
if (m && m > 0 && existing.length > m) {
|
|
existing.warned = true;
|
|
var w = new Error('Possible EventEmitter memory leak detected. ' +
|
|
existing.length + ' ' + type + ' listeners added. ' +
|
|
'Use emitter.setMaxListeners() to increase limit');
|
|
w.name = 'MaxListenersExceededWarning';
|
|
w.emitter = target;
|
|
w.type = type;
|
|
w.count = existing.length;
|
|
emitWarning(w);
|
|
}
|
|
}
|
|
}
|
|
|
|
return target;
|
|
}
|
|
function emitWarning(e) {
|
|
typeof console.warn === 'function' ? console.warn(e) : console.log(e);
|
|
}
|
|
EventEmitter.prototype.addListener = function addListener(type, listener) {
|
|
return _addListener(this, type, listener, false);
|
|
};
|
|
|
|
EventEmitter.prototype.on = EventEmitter.prototype.addListener;
|
|
|
|
EventEmitter.prototype.prependListener =
|
|
function prependListener(type, listener) {
|
|
return _addListener(this, type, listener, true);
|
|
};
|
|
|
|
function _onceWrap(target, type, listener) {
|
|
var fired = false;
|
|
function g() {
|
|
target.removeListener(type, g);
|
|
if (!fired) {
|
|
fired = true;
|
|
listener.apply(target, arguments);
|
|
}
|
|
}
|
|
g.listener = listener;
|
|
return g;
|
|
}
|
|
|
|
EventEmitter.prototype.once = function once(type, listener) {
|
|
if (typeof listener !== 'function')
|
|
throw new TypeError('"listener" argument must be a function');
|
|
this.on(type, _onceWrap(this, type, listener));
|
|
return this;
|
|
};
|
|
|
|
EventEmitter.prototype.prependOnceListener =
|
|
function prependOnceListener(type, listener) {
|
|
if (typeof listener !== 'function')
|
|
throw new TypeError('"listener" argument must be a function');
|
|
this.prependListener(type, _onceWrap(this, type, listener));
|
|
return this;
|
|
};
|
|
|
|
// emits a 'removeListener' event iff the listener was removed
|
|
EventEmitter.prototype.removeListener =
|
|
function removeListener(type, listener) {
|
|
var list, events, position, i, originalListener;
|
|
|
|
if (typeof listener !== 'function')
|
|
throw new TypeError('"listener" argument must be a function');
|
|
|
|
events = this._events;
|
|
if (!events)
|
|
return this;
|
|
|
|
list = events[type];
|
|
if (!list)
|
|
return this;
|
|
|
|
if (list === listener || (list.listener && list.listener === listener)) {
|
|
if (--this._eventsCount === 0)
|
|
this._events = new EventHandlers();
|
|
else {
|
|
delete events[type];
|
|
if (events.removeListener)
|
|
this.emit('removeListener', type, list.listener || listener);
|
|
}
|
|
} else if (typeof list !== 'function') {
|
|
position = -1;
|
|
|
|
for (i = list.length; i-- > 0;) {
|
|
if (list[i] === listener ||
|
|
(list[i].listener && list[i].listener === listener)) {
|
|
originalListener = list[i].listener;
|
|
position = i;
|
|
break;
|
|
}
|
|
}
|
|
|
|
if (position < 0)
|
|
return this;
|
|
|
|
if (list.length === 1) {
|
|
list[0] = undefined;
|
|
if (--this._eventsCount === 0) {
|
|
this._events = new EventHandlers();
|
|
return this;
|
|
} else {
|
|
delete events[type];
|
|
}
|
|
} else {
|
|
spliceOne(list, position);
|
|
}
|
|
|
|
if (events.removeListener)
|
|
this.emit('removeListener', type, originalListener || listener);
|
|
}
|
|
|
|
return this;
|
|
};
|
|
|
|
EventEmitter.prototype.removeAllListeners =
|
|
function removeAllListeners(type) {
|
|
var listeners, events;
|
|
|
|
events = this._events;
|
|
if (!events)
|
|
return this;
|
|
|
|
// not listening for removeListener, no need to emit
|
|
if (!events.removeListener) {
|
|
if (arguments.length === 0) {
|
|
this._events = new EventHandlers();
|
|
this._eventsCount = 0;
|
|
} else if (events[type]) {
|
|
if (--this._eventsCount === 0)
|
|
this._events = new EventHandlers();
|
|
else
|
|
delete events[type];
|
|
}
|
|
return this;
|
|
}
|
|
|
|
// emit removeListener for all listeners on all events
|
|
if (arguments.length === 0) {
|
|
var keys = Object.keys(events);
|
|
for (var i = 0, key; i < keys.length; ++i) {
|
|
key = keys[i];
|
|
if (key === 'removeListener') continue;
|
|
this.removeAllListeners(key);
|
|
}
|
|
this.removeAllListeners('removeListener');
|
|
this._events = new EventHandlers();
|
|
this._eventsCount = 0;
|
|
return this;
|
|
}
|
|
|
|
listeners = events[type];
|
|
|
|
if (typeof listeners === 'function') {
|
|
this.removeListener(type, listeners);
|
|
} else if (listeners) {
|
|
// LIFO order
|
|
do {
|
|
this.removeListener(type, listeners[listeners.length - 1]);
|
|
} while (listeners[0]);
|
|
}
|
|
|
|
return this;
|
|
};
|
|
|
|
EventEmitter.prototype.listeners = function listeners(type) {
|
|
var evlistener;
|
|
var ret;
|
|
var events = this._events;
|
|
|
|
if (!events)
|
|
ret = [];
|
|
else {
|
|
evlistener = events[type];
|
|
if (!evlistener)
|
|
ret = [];
|
|
else if (typeof evlistener === 'function')
|
|
ret = [evlistener.listener || evlistener];
|
|
else
|
|
ret = unwrapListeners(evlistener);
|
|
}
|
|
|
|
return ret;
|
|
};
|
|
|
|
EventEmitter.listenerCount = function(emitter, type) {
|
|
if (typeof emitter.listenerCount === 'function') {
|
|
return emitter.listenerCount(type);
|
|
} else {
|
|
return listenerCount.call(emitter, type);
|
|
}
|
|
};
|
|
|
|
EventEmitter.prototype.listenerCount = listenerCount;
|
|
function listenerCount(type) {
|
|
var events = this._events;
|
|
|
|
if (events) {
|
|
var evlistener = events[type];
|
|
|
|
if (typeof evlistener === 'function') {
|
|
return 1;
|
|
} else if (evlistener) {
|
|
return evlistener.length;
|
|
}
|
|
}
|
|
|
|
return 0;
|
|
}
|
|
|
|
EventEmitter.prototype.eventNames = function eventNames() {
|
|
return this._eventsCount > 0 ? Reflect.ownKeys(this._events) : [];
|
|
};
|
|
|
|
// About 1.5x faster than the two-arg version of Array#splice().
|
|
function spliceOne(list, index) {
|
|
for (var i = index, k = i + 1, n = list.length; k < n; i += 1, k += 1)
|
|
list[i] = list[k];
|
|
list.pop();
|
|
}
|
|
|
|
function arrayClone(arr, i) {
|
|
var copy = new Array(i);
|
|
while (i--)
|
|
copy[i] = arr[i];
|
|
return copy;
|
|
}
|
|
|
|
function unwrapListeners(arr) {
|
|
var ret = new Array(arr.length);
|
|
for (var i = 0; i < ret.length; ++i) {
|
|
ret[i] = arr[i].listener || arr[i];
|
|
}
|
|
return ret;
|
|
}
|
|
|
|
var global$1 = (typeof global !== "undefined" ? global :
|
|
typeof self !== "undefined" ? self :
|
|
typeof window !== "undefined" ? window : {});
|
|
|
|
var lookup = [];
|
|
var revLookup = [];
|
|
var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array;
|
|
var inited = false;
|
|
function init () {
|
|
inited = true;
|
|
var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
|
|
for (var i = 0, len = code.length; i < len; ++i) {
|
|
lookup[i] = code[i];
|
|
revLookup[code.charCodeAt(i)] = i;
|
|
}
|
|
|
|
revLookup['-'.charCodeAt(0)] = 62;
|
|
revLookup['_'.charCodeAt(0)] = 63;
|
|
}
|
|
|
|
function toByteArray (b64) {
|
|
if (!inited) {
|
|
init();
|
|
}
|
|
var i, j, l, tmp, placeHolders, arr;
|
|
var len = b64.length;
|
|
|
|
if (len % 4 > 0) {
|
|
throw new Error('Invalid string. Length must be a multiple of 4')
|
|
}
|
|
|
|
// the number of equal signs (place holders)
|
|
// if there are two placeholders, than the two characters before it
|
|
// represent one byte
|
|
// if there is only one, then the three characters before it represent 2 bytes
|
|
// this is just a cheap hack to not do indexOf twice
|
|
placeHolders = b64[len - 2] === '=' ? 2 : b64[len - 1] === '=' ? 1 : 0;
|
|
|
|
// base64 is 4/3 + up to two characters of the original data
|
|
arr = new Arr(len * 3 / 4 - placeHolders);
|
|
|
|
// if there are placeholders, only get up to the last complete 4 chars
|
|
l = placeHolders > 0 ? len - 4 : len;
|
|
|
|
var L = 0;
|
|
|
|
for (i = 0, j = 0; i < l; i += 4, j += 3) {
|
|
tmp = (revLookup[b64.charCodeAt(i)] << 18) | (revLookup[b64.charCodeAt(i + 1)] << 12) | (revLookup[b64.charCodeAt(i + 2)] << 6) | revLookup[b64.charCodeAt(i + 3)];
|
|
arr[L++] = (tmp >> 16) & 0xFF;
|
|
arr[L++] = (tmp >> 8) & 0xFF;
|
|
arr[L++] = tmp & 0xFF;
|
|
}
|
|
|
|
if (placeHolders === 2) {
|
|
tmp = (revLookup[b64.charCodeAt(i)] << 2) | (revLookup[b64.charCodeAt(i + 1)] >> 4);
|
|
arr[L++] = tmp & 0xFF;
|
|
} else if (placeHolders === 1) {
|
|
tmp = (revLookup[b64.charCodeAt(i)] << 10) | (revLookup[b64.charCodeAt(i + 1)] << 4) | (revLookup[b64.charCodeAt(i + 2)] >> 2);
|
|
arr[L++] = (tmp >> 8) & 0xFF;
|
|
arr[L++] = tmp & 0xFF;
|
|
}
|
|
|
|
return arr
|
|
}
|
|
|
|
function tripletToBase64 (num) {
|
|
return lookup[num >> 18 & 0x3F] + lookup[num >> 12 & 0x3F] + lookup[num >> 6 & 0x3F] + lookup[num & 0x3F]
|
|
}
|
|
|
|
function encodeChunk (uint8, start, end) {
|
|
var tmp;
|
|
var output = [];
|
|
for (var i = start; i < end; i += 3) {
|
|
tmp = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2]);
|
|
output.push(tripletToBase64(tmp));
|
|
}
|
|
return output.join('')
|
|
}
|
|
|
|
function fromByteArray (uint8) {
|
|
if (!inited) {
|
|
init();
|
|
}
|
|
var tmp;
|
|
var len = uint8.length;
|
|
var extraBytes = len % 3; // if we have 1 byte left, pad 2 bytes
|
|
var output = '';
|
|
var parts = [];
|
|
var maxChunkLength = 16383; // must be multiple of 3
|
|
|
|
// go through the array every three bytes, we'll deal with trailing stuff later
|
|
for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) {
|
|
parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength)));
|
|
}
|
|
|
|
// pad the end with zeros, but make sure to not forget the extra bytes
|
|
if (extraBytes === 1) {
|
|
tmp = uint8[len - 1];
|
|
output += lookup[tmp >> 2];
|
|
output += lookup[(tmp << 4) & 0x3F];
|
|
output += '==';
|
|
} else if (extraBytes === 2) {
|
|
tmp = (uint8[len - 2] << 8) + (uint8[len - 1]);
|
|
output += lookup[tmp >> 10];
|
|
output += lookup[(tmp >> 4) & 0x3F];
|
|
output += lookup[(tmp << 2) & 0x3F];
|
|
output += '=';
|
|
}
|
|
|
|
parts.push(output);
|
|
|
|
return parts.join('')
|
|
}
|
|
|
|
function read (buffer, offset, isLE, mLen, nBytes) {
|
|
var e, m;
|
|
var eLen = nBytes * 8 - mLen - 1;
|
|
var eMax = (1 << eLen) - 1;
|
|
var eBias = eMax >> 1;
|
|
var nBits = -7;
|
|
var i = isLE ? (nBytes - 1) : 0;
|
|
var d = isLE ? -1 : 1;
|
|
var s = buffer[offset + i];
|
|
|
|
i += d;
|
|
|
|
e = s & ((1 << (-nBits)) - 1);
|
|
s >>= (-nBits);
|
|
nBits += eLen;
|
|
for (; nBits > 0; e = e * 256 + buffer[offset + i], i += d, nBits -= 8) {}
|
|
|
|
m = e & ((1 << (-nBits)) - 1);
|
|
e >>= (-nBits);
|
|
nBits += mLen;
|
|
for (; nBits > 0; m = m * 256 + buffer[offset + i], i += d, nBits -= 8) {}
|
|
|
|
if (e === 0) {
|
|
e = 1 - eBias;
|
|
} else if (e === eMax) {
|
|
return m ? NaN : ((s ? -1 : 1) * Infinity)
|
|
} else {
|
|
m = m + Math.pow(2, mLen);
|
|
e = e - eBias;
|
|
}
|
|
return (s ? -1 : 1) * m * Math.pow(2, e - mLen)
|
|
}
|
|
|
|
function write (buffer, value, offset, isLE, mLen, nBytes) {
|
|
var e, m, c;
|
|
var eLen = nBytes * 8 - mLen - 1;
|
|
var eMax = (1 << eLen) - 1;
|
|
var eBias = eMax >> 1;
|
|
var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0);
|
|
var i = isLE ? 0 : (nBytes - 1);
|
|
var d = isLE ? 1 : -1;
|
|
var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0;
|
|
|
|
value = Math.abs(value);
|
|
|
|
if (isNaN(value) || value === Infinity) {
|
|
m = isNaN(value) ? 1 : 0;
|
|
e = eMax;
|
|
} else {
|
|
e = Math.floor(Math.log(value) / Math.LN2);
|
|
if (value * (c = Math.pow(2, -e)) < 1) {
|
|
e--;
|
|
c *= 2;
|
|
}
|
|
if (e + eBias >= 1) {
|
|
value += rt / c;
|
|
} else {
|
|
value += rt * Math.pow(2, 1 - eBias);
|
|
}
|
|
if (value * c >= 2) {
|
|
e++;
|
|
c /= 2;
|
|
}
|
|
|
|
if (e + eBias >= eMax) {
|
|
m = 0;
|
|
e = eMax;
|
|
} else if (e + eBias >= 1) {
|
|
m = (value * c - 1) * Math.pow(2, mLen);
|
|
e = e + eBias;
|
|
} else {
|
|
m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen);
|
|
e = 0;
|
|
}
|
|
}
|
|
|
|
for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {}
|
|
|
|
e = (e << mLen) | m;
|
|
eLen += mLen;
|
|
for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {}
|
|
|
|
buffer[offset + i - d] |= s * 128;
|
|
}
|
|
|
|
var toString = {}.toString;
|
|
|
|
var isArray = Array.isArray || function (arr) {
|
|
return toString.call(arr) == '[object Array]';
|
|
};
|
|
|
|
var INSPECT_MAX_BYTES = 50;
|
|
|
|
/**
|
|
* If `Buffer.TYPED_ARRAY_SUPPORT`:
|
|
* === true Use Uint8Array implementation (fastest)
|
|
* === false Use Object implementation (most compatible, even IE6)
|
|
*
|
|
* Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+,
|
|
* Opera 11.6+, iOS 4.2+.
|
|
*
|
|
* Due to various browser bugs, sometimes the Object implementation will be used even
|
|
* when the browser supports typed arrays.
|
|
*
|
|
* Note:
|
|
*
|
|
* - Firefox 4-29 lacks support for adding new properties to `Uint8Array` instances,
|
|
* See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438.
|
|
*
|
|
* - Chrome 9-10 is missing the `TypedArray.prototype.subarray` function.
|
|
*
|
|
* - IE10 has a broken `TypedArray.prototype.subarray` function which returns arrays of
|
|
* incorrect length in some situations.
|
|
|
|
* We detect these buggy browsers and set `Buffer.TYPED_ARRAY_SUPPORT` to `false` so they
|
|
* get the Object implementation, which is slower but behaves correctly.
|
|
*/
|
|
Buffer.TYPED_ARRAY_SUPPORT = global$1.TYPED_ARRAY_SUPPORT !== undefined
|
|
? global$1.TYPED_ARRAY_SUPPORT
|
|
: true;
|
|
|
|
function kMaxLength () {
|
|
return Buffer.TYPED_ARRAY_SUPPORT
|
|
? 0x7fffffff
|
|
: 0x3fffffff
|
|
}
|
|
|
|
function createBuffer (that, length) {
|
|
if (kMaxLength() < length) {
|
|
throw new RangeError('Invalid typed array length')
|
|
}
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
// Return an augmented `Uint8Array` instance, for best performance
|
|
that = new Uint8Array(length);
|
|
that.__proto__ = Buffer.prototype;
|
|
} else {
|
|
// Fallback: Return an object instance of the Buffer class
|
|
if (that === null) {
|
|
that = new Buffer(length);
|
|
}
|
|
that.length = length;
|
|
}
|
|
|
|
return that
|
|
}
|
|
|
|
/**
|
|
* The Buffer constructor returns instances of `Uint8Array` that have their
|
|
* prototype changed to `Buffer.prototype`. Furthermore, `Buffer` is a subclass of
|
|
* `Uint8Array`, so the returned instances will have all the node `Buffer` methods
|
|
* and the `Uint8Array` methods. Square bracket notation works as expected -- it
|
|
* returns a single octet.
|
|
*
|
|
* The `Uint8Array` prototype remains unmodified.
|
|
*/
|
|
|
|
function Buffer (arg, encodingOrOffset, length) {
|
|
if (!Buffer.TYPED_ARRAY_SUPPORT && !(this instanceof Buffer)) {
|
|
return new Buffer(arg, encodingOrOffset, length)
|
|
}
|
|
|
|
// Common case.
|
|
if (typeof arg === 'number') {
|
|
if (typeof encodingOrOffset === 'string') {
|
|
throw new Error(
|
|
'If encoding is specified then the first argument must be a string'
|
|
)
|
|
}
|
|
return allocUnsafe(this, arg)
|
|
}
|
|
return from(this, arg, encodingOrOffset, length)
|
|
}
|
|
|
|
Buffer.poolSize = 8192; // not used by this implementation
|
|
|
|
// TODO: Legacy, not needed anymore. Remove in next major version.
|
|
Buffer._augment = function (arr) {
|
|
arr.__proto__ = Buffer.prototype;
|
|
return arr
|
|
};
|
|
|
|
function from (that, value, encodingOrOffset, length) {
|
|
if (typeof value === 'number') {
|
|
throw new TypeError('"value" argument must not be a number')
|
|
}
|
|
|
|
if (typeof ArrayBuffer !== 'undefined' && value instanceof ArrayBuffer) {
|
|
return fromArrayBuffer(that, value, encodingOrOffset, length)
|
|
}
|
|
|
|
if (typeof value === 'string') {
|
|
return fromString(that, value, encodingOrOffset)
|
|
}
|
|
|
|
return fromObject(that, value)
|
|
}
|
|
|
|
/**
|
|
* Functionally equivalent to Buffer(arg, encoding) but throws a TypeError
|
|
* if value is a number.
|
|
* Buffer.from(str[, encoding])
|
|
* Buffer.from(array)
|
|
* Buffer.from(buffer)
|
|
* Buffer.from(arrayBuffer[, byteOffset[, length]])
|
|
**/
|
|
Buffer.from = function (value, encodingOrOffset, length) {
|
|
return from(null, value, encodingOrOffset, length)
|
|
};
|
|
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
Buffer.prototype.__proto__ = Uint8Array.prototype;
|
|
Buffer.__proto__ = Uint8Array;
|
|
}
|
|
|
|
function assertSize (size) {
|
|
if (typeof size !== 'number') {
|
|
throw new TypeError('"size" argument must be a number')
|
|
} else if (size < 0) {
|
|
throw new RangeError('"size" argument must not be negative')
|
|
}
|
|
}
|
|
|
|
function alloc (that, size, fill, encoding) {
|
|
assertSize(size);
|
|
if (size <= 0) {
|
|
return createBuffer(that, size)
|
|
}
|
|
if (fill !== undefined) {
|
|
// Only pay attention to encoding if it's a string. This
|
|
// prevents accidentally sending in a number that would
|
|
// be interpretted as a start offset.
|
|
return typeof encoding === 'string'
|
|
? createBuffer(that, size).fill(fill, encoding)
|
|
: createBuffer(that, size).fill(fill)
|
|
}
|
|
return createBuffer(that, size)
|
|
}
|
|
|
|
/**
|
|
* Creates a new filled Buffer instance.
|
|
* alloc(size[, fill[, encoding]])
|
|
**/
|
|
Buffer.alloc = function (size, fill, encoding) {
|
|
return alloc(null, size, fill, encoding)
|
|
};
|
|
|
|
function allocUnsafe (that, size) {
|
|
assertSize(size);
|
|
that = createBuffer(that, size < 0 ? 0 : checked(size) | 0);
|
|
if (!Buffer.TYPED_ARRAY_SUPPORT) {
|
|
for (var i = 0; i < size; ++i) {
|
|
that[i] = 0;
|
|
}
|
|
}
|
|
return that
|
|
}
|
|
|
|
/**
|
|
* Equivalent to Buffer(num), by default creates a non-zero-filled Buffer instance.
|
|
* */
|
|
Buffer.allocUnsafe = function (size) {
|
|
return allocUnsafe(null, size)
|
|
};
|
|
/**
|
|
* Equivalent to SlowBuffer(num), by default creates a non-zero-filled Buffer instance.
|
|
*/
|
|
Buffer.allocUnsafeSlow = function (size) {
|
|
return allocUnsafe(null, size)
|
|
};
|
|
|
|
function fromString (that, string, encoding) {
|
|
if (typeof encoding !== 'string' || encoding === '') {
|
|
encoding = 'utf8';
|
|
}
|
|
|
|
if (!Buffer.isEncoding(encoding)) {
|
|
throw new TypeError('"encoding" must be a valid string encoding')
|
|
}
|
|
|
|
var length = byteLength(string, encoding) | 0;
|
|
that = createBuffer(that, length);
|
|
|
|
var actual = that.write(string, encoding);
|
|
|
|
if (actual !== length) {
|
|
// Writing a hex string, for example, that contains invalid characters will
|
|
// cause everything after the first invalid character to be ignored. (e.g.
|
|
// 'abxxcd' will be treated as 'ab')
|
|
that = that.slice(0, actual);
|
|
}
|
|
|
|
return that
|
|
}
|
|
|
|
function fromArrayLike (that, array) {
|
|
var length = array.length < 0 ? 0 : checked(array.length) | 0;
|
|
that = createBuffer(that, length);
|
|
for (var i = 0; i < length; i += 1) {
|
|
that[i] = array[i] & 255;
|
|
}
|
|
return that
|
|
}
|
|
|
|
function fromArrayBuffer (that, array, byteOffset, length) {
|
|
array.byteLength; // this throws if `array` is not a valid ArrayBuffer
|
|
|
|
if (byteOffset < 0 || array.byteLength < byteOffset) {
|
|
throw new RangeError('\'offset\' is out of bounds')
|
|
}
|
|
|
|
if (array.byteLength < byteOffset + (length || 0)) {
|
|
throw new RangeError('\'length\' is out of bounds')
|
|
}
|
|
|
|
if (byteOffset === undefined && length === undefined) {
|
|
array = new Uint8Array(array);
|
|
} else if (length === undefined) {
|
|
array = new Uint8Array(array, byteOffset);
|
|
} else {
|
|
array = new Uint8Array(array, byteOffset, length);
|
|
}
|
|
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
// Return an augmented `Uint8Array` instance, for best performance
|
|
that = array;
|
|
that.__proto__ = Buffer.prototype;
|
|
} else {
|
|
// Fallback: Return an object instance of the Buffer class
|
|
that = fromArrayLike(that, array);
|
|
}
|
|
return that
|
|
}
|
|
|
|
function fromObject (that, obj) {
|
|
if (internalIsBuffer(obj)) {
|
|
var len = checked(obj.length) | 0;
|
|
that = createBuffer(that, len);
|
|
|
|
if (that.length === 0) {
|
|
return that
|
|
}
|
|
|
|
obj.copy(that, 0, 0, len);
|
|
return that
|
|
}
|
|
|
|
if (obj) {
|
|
if ((typeof ArrayBuffer !== 'undefined' &&
|
|
obj.buffer instanceof ArrayBuffer) || 'length' in obj) {
|
|
if (typeof obj.length !== 'number' || isnan(obj.length)) {
|
|
return createBuffer(that, 0)
|
|
}
|
|
return fromArrayLike(that, obj)
|
|
}
|
|
|
|
if (obj.type === 'Buffer' && isArray(obj.data)) {
|
|
return fromArrayLike(that, obj.data)
|
|
}
|
|
}
|
|
|
|
throw new TypeError('First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.')
|
|
}
|
|
|
|
function checked (length) {
|
|
// Note: cannot use `length < kMaxLength()` here because that fails when
|
|
// length is NaN (which is otherwise coerced to zero.)
|
|
if (length >= kMaxLength()) {
|
|
throw new RangeError('Attempt to allocate Buffer larger than maximum ' +
|
|
'size: 0x' + kMaxLength().toString(16) + ' bytes')
|
|
}
|
|
return length | 0
|
|
}
|
|
Buffer.isBuffer = isBuffer;
|
|
function internalIsBuffer (b) {
|
|
return !!(b != null && b._isBuffer)
|
|
}
|
|
|
|
Buffer.compare = function compare (a, b) {
|
|
if (!internalIsBuffer(a) || !internalIsBuffer(b)) {
|
|
throw new TypeError('Arguments must be Buffers')
|
|
}
|
|
|
|
if (a === b) return 0
|
|
|
|
var x = a.length;
|
|
var y = b.length;
|
|
|
|
for (var i = 0, len = Math.min(x, y); i < len; ++i) {
|
|
if (a[i] !== b[i]) {
|
|
x = a[i];
|
|
y = b[i];
|
|
break
|
|
}
|
|
}
|
|
|
|
if (x < y) return -1
|
|
if (y < x) return 1
|
|
return 0
|
|
};
|
|
|
|
Buffer.isEncoding = function isEncoding (encoding) {
|
|
switch (String(encoding).toLowerCase()) {
|
|
case 'hex':
|
|
case 'utf8':
|
|
case 'utf-8':
|
|
case 'ascii':
|
|
case 'latin1':
|
|
case 'binary':
|
|
case 'base64':
|
|
case 'ucs2':
|
|
case 'ucs-2':
|
|
case 'utf16le':
|
|
case 'utf-16le':
|
|
return true
|
|
default:
|
|
return false
|
|
}
|
|
};
|
|
|
|
Buffer.concat = function concat (list, length) {
|
|
if (!isArray(list)) {
|
|
throw new TypeError('"list" argument must be an Array of Buffers')
|
|
}
|
|
|
|
if (list.length === 0) {
|
|
return Buffer.alloc(0)
|
|
}
|
|
|
|
var i;
|
|
if (length === undefined) {
|
|
length = 0;
|
|
for (i = 0; i < list.length; ++i) {
|
|
length += list[i].length;
|
|
}
|
|
}
|
|
|
|
var buffer = Buffer.allocUnsafe(length);
|
|
var pos = 0;
|
|
for (i = 0; i < list.length; ++i) {
|
|
var buf = list[i];
|
|
if (!internalIsBuffer(buf)) {
|
|
throw new TypeError('"list" argument must be an Array of Buffers')
|
|
}
|
|
buf.copy(buffer, pos);
|
|
pos += buf.length;
|
|
}
|
|
return buffer
|
|
};
|
|
|
|
function byteLength (string, encoding) {
|
|
if (internalIsBuffer(string)) {
|
|
return string.length
|
|
}
|
|
if (typeof ArrayBuffer !== 'undefined' && typeof ArrayBuffer.isView === 'function' &&
|
|
(ArrayBuffer.isView(string) || string instanceof ArrayBuffer)) {
|
|
return string.byteLength
|
|
}
|
|
if (typeof string !== 'string') {
|
|
string = '' + string;
|
|
}
|
|
|
|
var len = string.length;
|
|
if (len === 0) return 0
|
|
|
|
// Use a for loop to avoid recursion
|
|
var loweredCase = false;
|
|
for (;;) {
|
|
switch (encoding) {
|
|
case 'ascii':
|
|
case 'latin1':
|
|
case 'binary':
|
|
return len
|
|
case 'utf8':
|
|
case 'utf-8':
|
|
case undefined:
|
|
return utf8ToBytes(string).length
|
|
case 'ucs2':
|
|
case 'ucs-2':
|
|
case 'utf16le':
|
|
case 'utf-16le':
|
|
return len * 2
|
|
case 'hex':
|
|
return len >>> 1
|
|
case 'base64':
|
|
return base64ToBytes(string).length
|
|
default:
|
|
if (loweredCase) return utf8ToBytes(string).length // assume utf8
|
|
encoding = ('' + encoding).toLowerCase();
|
|
loweredCase = true;
|
|
}
|
|
}
|
|
}
|
|
Buffer.byteLength = byteLength;
|
|
|
|
function slowToString (encoding, start, end) {
|
|
var loweredCase = false;
|
|
|
|
// No need to verify that "this.length <= MAX_UINT32" since it's a read-only
|
|
// property of a typed array.
|
|
|
|
// This behaves neither like String nor Uint8Array in that we set start/end
|
|
// to their upper/lower bounds if the value passed is out of range.
|
|
// undefined is handled specially as per ECMA-262 6th Edition,
|
|
// Section 13.3.3.7 Runtime Semantics: KeyedBindingInitialization.
|
|
if (start === undefined || start < 0) {
|
|
start = 0;
|
|
}
|
|
// Return early if start > this.length. Done here to prevent potential uint32
|
|
// coercion fail below.
|
|
if (start > this.length) {
|
|
return ''
|
|
}
|
|
|
|
if (end === undefined || end > this.length) {
|
|
end = this.length;
|
|
}
|
|
|
|
if (end <= 0) {
|
|
return ''
|
|
}
|
|
|
|
// Force coersion to uint32. This will also coerce falsey/NaN values to 0.
|
|
end >>>= 0;
|
|
start >>>= 0;
|
|
|
|
if (end <= start) {
|
|
return ''
|
|
}
|
|
|
|
if (!encoding) encoding = 'utf8';
|
|
|
|
while (true) {
|
|
switch (encoding) {
|
|
case 'hex':
|
|
return hexSlice(this, start, end)
|
|
|
|
case 'utf8':
|
|
case 'utf-8':
|
|
return utf8Slice(this, start, end)
|
|
|
|
case 'ascii':
|
|
return asciiSlice(this, start, end)
|
|
|
|
case 'latin1':
|
|
case 'binary':
|
|
return latin1Slice(this, start, end)
|
|
|
|
case 'base64':
|
|
return base64Slice(this, start, end)
|
|
|
|
case 'ucs2':
|
|
case 'ucs-2':
|
|
case 'utf16le':
|
|
case 'utf-16le':
|
|
return utf16leSlice(this, start, end)
|
|
|
|
default:
|
|
if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
|
|
encoding = (encoding + '').toLowerCase();
|
|
loweredCase = true;
|
|
}
|
|
}
|
|
}
|
|
|
|
// The property is used by `Buffer.isBuffer` and `is-buffer` (in Safari 5-7) to detect
|
|
// Buffer instances.
|
|
Buffer.prototype._isBuffer = true;
|
|
|
|
function swap (b, n, m) {
|
|
var i = b[n];
|
|
b[n] = b[m];
|
|
b[m] = i;
|
|
}
|
|
|
|
Buffer.prototype.swap16 = function swap16 () {
|
|
var len = this.length;
|
|
if (len % 2 !== 0) {
|
|
throw new RangeError('Buffer size must be a multiple of 16-bits')
|
|
}
|
|
for (var i = 0; i < len; i += 2) {
|
|
swap(this, i, i + 1);
|
|
}
|
|
return this
|
|
};
|
|
|
|
Buffer.prototype.swap32 = function swap32 () {
|
|
var len = this.length;
|
|
if (len % 4 !== 0) {
|
|
throw new RangeError('Buffer size must be a multiple of 32-bits')
|
|
}
|
|
for (var i = 0; i < len; i += 4) {
|
|
swap(this, i, i + 3);
|
|
swap(this, i + 1, i + 2);
|
|
}
|
|
return this
|
|
};
|
|
|
|
Buffer.prototype.swap64 = function swap64 () {
|
|
var len = this.length;
|
|
if (len % 8 !== 0) {
|
|
throw new RangeError('Buffer size must be a multiple of 64-bits')
|
|
}
|
|
for (var i = 0; i < len; i += 8) {
|
|
swap(this, i, i + 7);
|
|
swap(this, i + 1, i + 6);
|
|
swap(this, i + 2, i + 5);
|
|
swap(this, i + 3, i + 4);
|
|
}
|
|
return this
|
|
};
|
|
|
|
Buffer.prototype.toString = function toString () {
|
|
var length = this.length | 0;
|
|
if (length === 0) return ''
|
|
if (arguments.length === 0) return utf8Slice(this, 0, length)
|
|
return slowToString.apply(this, arguments)
|
|
};
|
|
|
|
Buffer.prototype.equals = function equals (b) {
|
|
if (!internalIsBuffer(b)) throw new TypeError('Argument must be a Buffer')
|
|
if (this === b) return true
|
|
return Buffer.compare(this, b) === 0
|
|
};
|
|
|
|
Buffer.prototype.inspect = function inspect () {
|
|
var str = '';
|
|
var max = INSPECT_MAX_BYTES;
|
|
if (this.length > 0) {
|
|
str = this.toString('hex', 0, max).match(/.{2}/g).join(' ');
|
|
if (this.length > max) str += ' ... ';
|
|
}
|
|
return '<Buffer ' + str + '>'
|
|
};
|
|
|
|
Buffer.prototype.compare = function compare (target, start, end, thisStart, thisEnd) {
|
|
if (!internalIsBuffer(target)) {
|
|
throw new TypeError('Argument must be a Buffer')
|
|
}
|
|
|
|
if (start === undefined) {
|
|
start = 0;
|
|
}
|
|
if (end === undefined) {
|
|
end = target ? target.length : 0;
|
|
}
|
|
if (thisStart === undefined) {
|
|
thisStart = 0;
|
|
}
|
|
if (thisEnd === undefined) {
|
|
thisEnd = this.length;
|
|
}
|
|
|
|
if (start < 0 || end > target.length || thisStart < 0 || thisEnd > this.length) {
|
|
throw new RangeError('out of range index')
|
|
}
|
|
|
|
if (thisStart >= thisEnd && start >= end) {
|
|
return 0
|
|
}
|
|
if (thisStart >= thisEnd) {
|
|
return -1
|
|
}
|
|
if (start >= end) {
|
|
return 1
|
|
}
|
|
|
|
start >>>= 0;
|
|
end >>>= 0;
|
|
thisStart >>>= 0;
|
|
thisEnd >>>= 0;
|
|
|
|
if (this === target) return 0
|
|
|
|
var x = thisEnd - thisStart;
|
|
var y = end - start;
|
|
var len = Math.min(x, y);
|
|
|
|
var thisCopy = this.slice(thisStart, thisEnd);
|
|
var targetCopy = target.slice(start, end);
|
|
|
|
for (var i = 0; i < len; ++i) {
|
|
if (thisCopy[i] !== targetCopy[i]) {
|
|
x = thisCopy[i];
|
|
y = targetCopy[i];
|
|
break
|
|
}
|
|
}
|
|
|
|
if (x < y) return -1
|
|
if (y < x) return 1
|
|
return 0
|
|
};
|
|
|
|
// Finds either the first index of `val` in `buffer` at offset >= `byteOffset`,
|
|
// OR the last index of `val` in `buffer` at offset <= `byteOffset`.
|
|
//
|
|
// Arguments:
|
|
// - buffer - a Buffer to search
|
|
// - val - a string, Buffer, or number
|
|
// - byteOffset - an index into `buffer`; will be clamped to an int32
|
|
// - encoding - an optional encoding, relevant is val is a string
|
|
// - dir - true for indexOf, false for lastIndexOf
|
|
function bidirectionalIndexOf (buffer, val, byteOffset, encoding, dir) {
|
|
// Empty buffer means no match
|
|
if (buffer.length === 0) return -1
|
|
|
|
// Normalize byteOffset
|
|
if (typeof byteOffset === 'string') {
|
|
encoding = byteOffset;
|
|
byteOffset = 0;
|
|
} else if (byteOffset > 0x7fffffff) {
|
|
byteOffset = 0x7fffffff;
|
|
} else if (byteOffset < -0x80000000) {
|
|
byteOffset = -0x80000000;
|
|
}
|
|
byteOffset = +byteOffset; // Coerce to Number.
|
|
if (isNaN(byteOffset)) {
|
|
// byteOffset: it it's undefined, null, NaN, "foo", etc, search whole buffer
|
|
byteOffset = dir ? 0 : (buffer.length - 1);
|
|
}
|
|
|
|
// Normalize byteOffset: negative offsets start from the end of the buffer
|
|
if (byteOffset < 0) byteOffset = buffer.length + byteOffset;
|
|
if (byteOffset >= buffer.length) {
|
|
if (dir) return -1
|
|
else byteOffset = buffer.length - 1;
|
|
} else if (byteOffset < 0) {
|
|
if (dir) byteOffset = 0;
|
|
else return -1
|
|
}
|
|
|
|
// Normalize val
|
|
if (typeof val === 'string') {
|
|
val = Buffer.from(val, encoding);
|
|
}
|
|
|
|
// Finally, search either indexOf (if dir is true) or lastIndexOf
|
|
if (internalIsBuffer(val)) {
|
|
// Special case: looking for empty string/buffer always fails
|
|
if (val.length === 0) {
|
|
return -1
|
|
}
|
|
return arrayIndexOf(buffer, val, byteOffset, encoding, dir)
|
|
} else if (typeof val === 'number') {
|
|
val = val & 0xFF; // Search for a byte value [0-255]
|
|
if (Buffer.TYPED_ARRAY_SUPPORT &&
|
|
typeof Uint8Array.prototype.indexOf === 'function') {
|
|
if (dir) {
|
|
return Uint8Array.prototype.indexOf.call(buffer, val, byteOffset)
|
|
} else {
|
|
return Uint8Array.prototype.lastIndexOf.call(buffer, val, byteOffset)
|
|
}
|
|
}
|
|
return arrayIndexOf(buffer, [ val ], byteOffset, encoding, dir)
|
|
}
|
|
|
|
throw new TypeError('val must be string, number or Buffer')
|
|
}
|
|
|
|
function arrayIndexOf (arr, val, byteOffset, encoding, dir) {
|
|
var indexSize = 1;
|
|
var arrLength = arr.length;
|
|
var valLength = val.length;
|
|
|
|
if (encoding !== undefined) {
|
|
encoding = String(encoding).toLowerCase();
|
|
if (encoding === 'ucs2' || encoding === 'ucs-2' ||
|
|
encoding === 'utf16le' || encoding === 'utf-16le') {
|
|
if (arr.length < 2 || val.length < 2) {
|
|
return -1
|
|
}
|
|
indexSize = 2;
|
|
arrLength /= 2;
|
|
valLength /= 2;
|
|
byteOffset /= 2;
|
|
}
|
|
}
|
|
|
|
function read (buf, i) {
|
|
if (indexSize === 1) {
|
|
return buf[i]
|
|
} else {
|
|
return buf.readUInt16BE(i * indexSize)
|
|
}
|
|
}
|
|
|
|
var i;
|
|
if (dir) {
|
|
var foundIndex = -1;
|
|
for (i = byteOffset; i < arrLength; i++) {
|
|
if (read(arr, i) === read(val, foundIndex === -1 ? 0 : i - foundIndex)) {
|
|
if (foundIndex === -1) foundIndex = i;
|
|
if (i - foundIndex + 1 === valLength) return foundIndex * indexSize
|
|
} else {
|
|
if (foundIndex !== -1) i -= i - foundIndex;
|
|
foundIndex = -1;
|
|
}
|
|
}
|
|
} else {
|
|
if (byteOffset + valLength > arrLength) byteOffset = arrLength - valLength;
|
|
for (i = byteOffset; i >= 0; i--) {
|
|
var found = true;
|
|
for (var j = 0; j < valLength; j++) {
|
|
if (read(arr, i + j) !== read(val, j)) {
|
|
found = false;
|
|
break
|
|
}
|
|
}
|
|
if (found) return i
|
|
}
|
|
}
|
|
|
|
return -1
|
|
}
|
|
|
|
Buffer.prototype.includes = function includes (val, byteOffset, encoding) {
|
|
return this.indexOf(val, byteOffset, encoding) !== -1
|
|
};
|
|
|
|
Buffer.prototype.indexOf = function indexOf (val, byteOffset, encoding) {
|
|
return bidirectionalIndexOf(this, val, byteOffset, encoding, true)
|
|
};
|
|
|
|
Buffer.prototype.lastIndexOf = function lastIndexOf (val, byteOffset, encoding) {
|
|
return bidirectionalIndexOf(this, val, byteOffset, encoding, false)
|
|
};
|
|
|
|
function hexWrite (buf, string, offset, length) {
|
|
offset = Number(offset) || 0;
|
|
var remaining = buf.length - offset;
|
|
if (!length) {
|
|
length = remaining;
|
|
} else {
|
|
length = Number(length);
|
|
if (length > remaining) {
|
|
length = remaining;
|
|
}
|
|
}
|
|
|
|
// must be an even number of digits
|
|
var strLen = string.length;
|
|
if (strLen % 2 !== 0) throw new TypeError('Invalid hex string')
|
|
|
|
if (length > strLen / 2) {
|
|
length = strLen / 2;
|
|
}
|
|
for (var i = 0; i < length; ++i) {
|
|
var parsed = parseInt(string.substr(i * 2, 2), 16);
|
|
if (isNaN(parsed)) return i
|
|
buf[offset + i] = parsed;
|
|
}
|
|
return i
|
|
}
|
|
|
|
function utf8Write (buf, string, offset, length) {
|
|
return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length)
|
|
}
|
|
|
|
function asciiWrite (buf, string, offset, length) {
|
|
return blitBuffer(asciiToBytes(string), buf, offset, length)
|
|
}
|
|
|
|
function latin1Write (buf, string, offset, length) {
|
|
return asciiWrite(buf, string, offset, length)
|
|
}
|
|
|
|
function base64Write (buf, string, offset, length) {
|
|
return blitBuffer(base64ToBytes(string), buf, offset, length)
|
|
}
|
|
|
|
function ucs2Write (buf, string, offset, length) {
|
|
return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length)
|
|
}
|
|
|
|
Buffer.prototype.write = function write (string, offset, length, encoding) {
|
|
// Buffer#write(string)
|
|
if (offset === undefined) {
|
|
encoding = 'utf8';
|
|
length = this.length;
|
|
offset = 0;
|
|
// Buffer#write(string, encoding)
|
|
} else if (length === undefined && typeof offset === 'string') {
|
|
encoding = offset;
|
|
length = this.length;
|
|
offset = 0;
|
|
// Buffer#write(string, offset[, length][, encoding])
|
|
} else if (isFinite(offset)) {
|
|
offset = offset | 0;
|
|
if (isFinite(length)) {
|
|
length = length | 0;
|
|
if (encoding === undefined) encoding = 'utf8';
|
|
} else {
|
|
encoding = length;
|
|
length = undefined;
|
|
}
|
|
// legacy write(string, encoding, offset, length) - remove in v0.13
|
|
} else {
|
|
throw new Error(
|
|
'Buffer.write(string, encoding, offset[, length]) is no longer supported'
|
|
)
|
|
}
|
|
|
|
var remaining = this.length - offset;
|
|
if (length === undefined || length > remaining) length = remaining;
|
|
|
|
if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) {
|
|
throw new RangeError('Attempt to write outside buffer bounds')
|
|
}
|
|
|
|
if (!encoding) encoding = 'utf8';
|
|
|
|
var loweredCase = false;
|
|
for (;;) {
|
|
switch (encoding) {
|
|
case 'hex':
|
|
return hexWrite(this, string, offset, length)
|
|
|
|
case 'utf8':
|
|
case 'utf-8':
|
|
return utf8Write(this, string, offset, length)
|
|
|
|
case 'ascii':
|
|
return asciiWrite(this, string, offset, length)
|
|
|
|
case 'latin1':
|
|
case 'binary':
|
|
return latin1Write(this, string, offset, length)
|
|
|
|
case 'base64':
|
|
// Warning: maxLength not taken into account in base64Write
|
|
return base64Write(this, string, offset, length)
|
|
|
|
case 'ucs2':
|
|
case 'ucs-2':
|
|
case 'utf16le':
|
|
case 'utf-16le':
|
|
return ucs2Write(this, string, offset, length)
|
|
|
|
default:
|
|
if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding)
|
|
encoding = ('' + encoding).toLowerCase();
|
|
loweredCase = true;
|
|
}
|
|
}
|
|
};
|
|
|
|
Buffer.prototype.toJSON = function toJSON () {
|
|
return {
|
|
type: 'Buffer',
|
|
data: Array.prototype.slice.call(this._arr || this, 0)
|
|
}
|
|
};
|
|
|
|
function base64Slice (buf, start, end) {
|
|
if (start === 0 && end === buf.length) {
|
|
return fromByteArray(buf)
|
|
} else {
|
|
return fromByteArray(buf.slice(start, end))
|
|
}
|
|
}
|
|
|
|
function utf8Slice (buf, start, end) {
|
|
end = Math.min(buf.length, end);
|
|
var res = [];
|
|
|
|
var i = start;
|
|
while (i < end) {
|
|
var firstByte = buf[i];
|
|
var codePoint = null;
|
|
var bytesPerSequence = (firstByte > 0xEF) ? 4
|
|
: (firstByte > 0xDF) ? 3
|
|
: (firstByte > 0xBF) ? 2
|
|
: 1;
|
|
|
|
if (i + bytesPerSequence <= end) {
|
|
var secondByte, thirdByte, fourthByte, tempCodePoint;
|
|
|
|
switch (bytesPerSequence) {
|
|
case 1:
|
|
if (firstByte < 0x80) {
|
|
codePoint = firstByte;
|
|
}
|
|
break
|
|
case 2:
|
|
secondByte = buf[i + 1];
|
|
if ((secondByte & 0xC0) === 0x80) {
|
|
tempCodePoint = (firstByte & 0x1F) << 0x6 | (secondByte & 0x3F);
|
|
if (tempCodePoint > 0x7F) {
|
|
codePoint = tempCodePoint;
|
|
}
|
|
}
|
|
break
|
|
case 3:
|
|
secondByte = buf[i + 1];
|
|
thirdByte = buf[i + 2];
|
|
if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80) {
|
|
tempCodePoint = (firstByte & 0xF) << 0xC | (secondByte & 0x3F) << 0x6 | (thirdByte & 0x3F);
|
|
if (tempCodePoint > 0x7FF && (tempCodePoint < 0xD800 || tempCodePoint > 0xDFFF)) {
|
|
codePoint = tempCodePoint;
|
|
}
|
|
}
|
|
break
|
|
case 4:
|
|
secondByte = buf[i + 1];
|
|
thirdByte = buf[i + 2];
|
|
fourthByte = buf[i + 3];
|
|
if ((secondByte & 0xC0) === 0x80 && (thirdByte & 0xC0) === 0x80 && (fourthByte & 0xC0) === 0x80) {
|
|
tempCodePoint = (firstByte & 0xF) << 0x12 | (secondByte & 0x3F) << 0xC | (thirdByte & 0x3F) << 0x6 | (fourthByte & 0x3F);
|
|
if (tempCodePoint > 0xFFFF && tempCodePoint < 0x110000) {
|
|
codePoint = tempCodePoint;
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
if (codePoint === null) {
|
|
// we did not generate a valid codePoint so insert a
|
|
// replacement char (U+FFFD) and advance only 1 byte
|
|
codePoint = 0xFFFD;
|
|
bytesPerSequence = 1;
|
|
} else if (codePoint > 0xFFFF) {
|
|
// encode to utf16 (surrogate pair dance)
|
|
codePoint -= 0x10000;
|
|
res.push(codePoint >>> 10 & 0x3FF | 0xD800);
|
|
codePoint = 0xDC00 | codePoint & 0x3FF;
|
|
}
|
|
|
|
res.push(codePoint);
|
|
i += bytesPerSequence;
|
|
}
|
|
|
|
return decodeCodePointsArray(res)
|
|
}
|
|
|
|
// Based on http://stackoverflow.com/a/22747272/680742, the browser with
|
|
// the lowest limit is Chrome, with 0x10000 args.
|
|
// We go 1 magnitude less, for safety
|
|
var MAX_ARGUMENTS_LENGTH = 0x1000;
|
|
|
|
function decodeCodePointsArray (codePoints) {
|
|
var len = codePoints.length;
|
|
if (len <= MAX_ARGUMENTS_LENGTH) {
|
|
return String.fromCharCode.apply(String, codePoints) // avoid extra slice()
|
|
}
|
|
|
|
// Decode in chunks to avoid "call stack size exceeded".
|
|
var res = '';
|
|
var i = 0;
|
|
while (i < len) {
|
|
res += String.fromCharCode.apply(
|
|
String,
|
|
codePoints.slice(i, i += MAX_ARGUMENTS_LENGTH)
|
|
);
|
|
}
|
|
return res
|
|
}
|
|
|
|
function asciiSlice (buf, start, end) {
|
|
var ret = '';
|
|
end = Math.min(buf.length, end);
|
|
|
|
for (var i = start; i < end; ++i) {
|
|
ret += String.fromCharCode(buf[i] & 0x7F);
|
|
}
|
|
return ret
|
|
}
|
|
|
|
function latin1Slice (buf, start, end) {
|
|
var ret = '';
|
|
end = Math.min(buf.length, end);
|
|
|
|
for (var i = start; i < end; ++i) {
|
|
ret += String.fromCharCode(buf[i]);
|
|
}
|
|
return ret
|
|
}
|
|
|
|
function hexSlice (buf, start, end) {
|
|
var len = buf.length;
|
|
|
|
if (!start || start < 0) start = 0;
|
|
if (!end || end < 0 || end > len) end = len;
|
|
|
|
var out = '';
|
|
for (var i = start; i < end; ++i) {
|
|
out += toHex(buf[i]);
|
|
}
|
|
return out
|
|
}
|
|
|
|
function utf16leSlice (buf, start, end) {
|
|
var bytes = buf.slice(start, end);
|
|
var res = '';
|
|
for (var i = 0; i < bytes.length; i += 2) {
|
|
res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256);
|
|
}
|
|
return res
|
|
}
|
|
|
|
Buffer.prototype.slice = function slice (start, end) {
|
|
var len = this.length;
|
|
start = ~~start;
|
|
end = end === undefined ? len : ~~end;
|
|
|
|
if (start < 0) {
|
|
start += len;
|
|
if (start < 0) start = 0;
|
|
} else if (start > len) {
|
|
start = len;
|
|
}
|
|
|
|
if (end < 0) {
|
|
end += len;
|
|
if (end < 0) end = 0;
|
|
} else if (end > len) {
|
|
end = len;
|
|
}
|
|
|
|
if (end < start) end = start;
|
|
|
|
var newBuf;
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
newBuf = this.subarray(start, end);
|
|
newBuf.__proto__ = Buffer.prototype;
|
|
} else {
|
|
var sliceLen = end - start;
|
|
newBuf = new Buffer(sliceLen, undefined);
|
|
for (var i = 0; i < sliceLen; ++i) {
|
|
newBuf[i] = this[i + start];
|
|
}
|
|
}
|
|
|
|
return newBuf
|
|
};
|
|
|
|
/*
|
|
* Need to make sure that buffer isn't trying to write out of bounds.
|
|
*/
|
|
function checkOffset (offset, ext, length) {
|
|
if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint')
|
|
if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length')
|
|
}
|
|
|
|
Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) {
|
|
offset = offset | 0;
|
|
byteLength = byteLength | 0;
|
|
if (!noAssert) checkOffset(offset, byteLength, this.length);
|
|
|
|
var val = this[offset];
|
|
var mul = 1;
|
|
var i = 0;
|
|
while (++i < byteLength && (mul *= 0x100)) {
|
|
val += this[offset + i] * mul;
|
|
}
|
|
|
|
return val
|
|
};
|
|
|
|
Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) {
|
|
offset = offset | 0;
|
|
byteLength = byteLength | 0;
|
|
if (!noAssert) {
|
|
checkOffset(offset, byteLength, this.length);
|
|
}
|
|
|
|
var val = this[offset + --byteLength];
|
|
var mul = 1;
|
|
while (byteLength > 0 && (mul *= 0x100)) {
|
|
val += this[offset + --byteLength] * mul;
|
|
}
|
|
|
|
return val
|
|
};
|
|
|
|
Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) {
|
|
if (!noAssert) checkOffset(offset, 1, this.length);
|
|
return this[offset]
|
|
};
|
|
|
|
Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) {
|
|
if (!noAssert) checkOffset(offset, 2, this.length);
|
|
return this[offset] | (this[offset + 1] << 8)
|
|
};
|
|
|
|
Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) {
|
|
if (!noAssert) checkOffset(offset, 2, this.length);
|
|
return (this[offset] << 8) | this[offset + 1]
|
|
};
|
|
|
|
Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) {
|
|
if (!noAssert) checkOffset(offset, 4, this.length);
|
|
|
|
return ((this[offset]) |
|
|
(this[offset + 1] << 8) |
|
|
(this[offset + 2] << 16)) +
|
|
(this[offset + 3] * 0x1000000)
|
|
};
|
|
|
|
Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) {
|
|
if (!noAssert) checkOffset(offset, 4, this.length);
|
|
|
|
return (this[offset] * 0x1000000) +
|
|
((this[offset + 1] << 16) |
|
|
(this[offset + 2] << 8) |
|
|
this[offset + 3])
|
|
};
|
|
|
|
Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) {
|
|
offset = offset | 0;
|
|
byteLength = byteLength | 0;
|
|
if (!noAssert) checkOffset(offset, byteLength, this.length);
|
|
|
|
var val = this[offset];
|
|
var mul = 1;
|
|
var i = 0;
|
|
while (++i < byteLength && (mul *= 0x100)) {
|
|
val += this[offset + i] * mul;
|
|
}
|
|
mul *= 0x80;
|
|
|
|
if (val >= mul) val -= Math.pow(2, 8 * byteLength);
|
|
|
|
return val
|
|
};
|
|
|
|
Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) {
|
|
offset = offset | 0;
|
|
byteLength = byteLength | 0;
|
|
if (!noAssert) checkOffset(offset, byteLength, this.length);
|
|
|
|
var i = byteLength;
|
|
var mul = 1;
|
|
var val = this[offset + --i];
|
|
while (i > 0 && (mul *= 0x100)) {
|
|
val += this[offset + --i] * mul;
|
|
}
|
|
mul *= 0x80;
|
|
|
|
if (val >= mul) val -= Math.pow(2, 8 * byteLength);
|
|
|
|
return val
|
|
};
|
|
|
|
Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) {
|
|
if (!noAssert) checkOffset(offset, 1, this.length);
|
|
if (!(this[offset] & 0x80)) return (this[offset])
|
|
return ((0xff - this[offset] + 1) * -1)
|
|
};
|
|
|
|
Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) {
|
|
if (!noAssert) checkOffset(offset, 2, this.length);
|
|
var val = this[offset] | (this[offset + 1] << 8);
|
|
return (val & 0x8000) ? val | 0xFFFF0000 : val
|
|
};
|
|
|
|
Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) {
|
|
if (!noAssert) checkOffset(offset, 2, this.length);
|
|
var val = this[offset + 1] | (this[offset] << 8);
|
|
return (val & 0x8000) ? val | 0xFFFF0000 : val
|
|
};
|
|
|
|
Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) {
|
|
if (!noAssert) checkOffset(offset, 4, this.length);
|
|
|
|
return (this[offset]) |
|
|
(this[offset + 1] << 8) |
|
|
(this[offset + 2] << 16) |
|
|
(this[offset + 3] << 24)
|
|
};
|
|
|
|
Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) {
|
|
if (!noAssert) checkOffset(offset, 4, this.length);
|
|
|
|
return (this[offset] << 24) |
|
|
(this[offset + 1] << 16) |
|
|
(this[offset + 2] << 8) |
|
|
(this[offset + 3])
|
|
};
|
|
|
|
Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) {
|
|
if (!noAssert) checkOffset(offset, 4, this.length);
|
|
return read(this, offset, true, 23, 4)
|
|
};
|
|
|
|
Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) {
|
|
if (!noAssert) checkOffset(offset, 4, this.length);
|
|
return read(this, offset, false, 23, 4)
|
|
};
|
|
|
|
Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) {
|
|
if (!noAssert) checkOffset(offset, 8, this.length);
|
|
return read(this, offset, true, 52, 8)
|
|
};
|
|
|
|
Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) {
|
|
if (!noAssert) checkOffset(offset, 8, this.length);
|
|
return read(this, offset, false, 52, 8)
|
|
};
|
|
|
|
function checkInt (buf, value, offset, ext, max, min) {
|
|
if (!internalIsBuffer(buf)) throw new TypeError('"buffer" argument must be a Buffer instance')
|
|
if (value > max || value < min) throw new RangeError('"value" argument is out of bounds')
|
|
if (offset + ext > buf.length) throw new RangeError('Index out of range')
|
|
}
|
|
|
|
Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) {
|
|
value = +value;
|
|
offset = offset | 0;
|
|
byteLength = byteLength | 0;
|
|
if (!noAssert) {
|
|
var maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
|
checkInt(this, value, offset, byteLength, maxBytes, 0);
|
|
}
|
|
|
|
var mul = 1;
|
|
var i = 0;
|
|
this[offset] = value & 0xFF;
|
|
while (++i < byteLength && (mul *= 0x100)) {
|
|
this[offset + i] = (value / mul) & 0xFF;
|
|
}
|
|
|
|
return offset + byteLength
|
|
};
|
|
|
|
Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) {
|
|
value = +value;
|
|
offset = offset | 0;
|
|
byteLength = byteLength | 0;
|
|
if (!noAssert) {
|
|
var maxBytes = Math.pow(2, 8 * byteLength) - 1;
|
|
checkInt(this, value, offset, byteLength, maxBytes, 0);
|
|
}
|
|
|
|
var i = byteLength - 1;
|
|
var mul = 1;
|
|
this[offset + i] = value & 0xFF;
|
|
while (--i >= 0 && (mul *= 0x100)) {
|
|
this[offset + i] = (value / mul) & 0xFF;
|
|
}
|
|
|
|
return offset + byteLength
|
|
};
|
|
|
|
Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) {
|
|
value = +value;
|
|
offset = offset | 0;
|
|
if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0);
|
|
if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value);
|
|
this[offset] = (value & 0xff);
|
|
return offset + 1
|
|
};
|
|
|
|
function objectWriteUInt16 (buf, value, offset, littleEndian) {
|
|
if (value < 0) value = 0xffff + value + 1;
|
|
for (var i = 0, j = Math.min(buf.length - offset, 2); i < j; ++i) {
|
|
buf[offset + i] = (value & (0xff << (8 * (littleEndian ? i : 1 - i)))) >>>
|
|
(littleEndian ? i : 1 - i) * 8;
|
|
}
|
|
}
|
|
|
|
Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) {
|
|
value = +value;
|
|
offset = offset | 0;
|
|
if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0);
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
this[offset] = (value & 0xff);
|
|
this[offset + 1] = (value >>> 8);
|
|
} else {
|
|
objectWriteUInt16(this, value, offset, true);
|
|
}
|
|
return offset + 2
|
|
};
|
|
|
|
Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) {
|
|
value = +value;
|
|
offset = offset | 0;
|
|
if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0);
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
this[offset] = (value >>> 8);
|
|
this[offset + 1] = (value & 0xff);
|
|
} else {
|
|
objectWriteUInt16(this, value, offset, false);
|
|
}
|
|
return offset + 2
|
|
};
|
|
|
|
function objectWriteUInt32 (buf, value, offset, littleEndian) {
|
|
if (value < 0) value = 0xffffffff + value + 1;
|
|
for (var i = 0, j = Math.min(buf.length - offset, 4); i < j; ++i) {
|
|
buf[offset + i] = (value >>> (littleEndian ? i : 3 - i) * 8) & 0xff;
|
|
}
|
|
}
|
|
|
|
Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) {
|
|
value = +value;
|
|
offset = offset | 0;
|
|
if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0);
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
this[offset + 3] = (value >>> 24);
|
|
this[offset + 2] = (value >>> 16);
|
|
this[offset + 1] = (value >>> 8);
|
|
this[offset] = (value & 0xff);
|
|
} else {
|
|
objectWriteUInt32(this, value, offset, true);
|
|
}
|
|
return offset + 4
|
|
};
|
|
|
|
Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) {
|
|
value = +value;
|
|
offset = offset | 0;
|
|
if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0);
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
this[offset] = (value >>> 24);
|
|
this[offset + 1] = (value >>> 16);
|
|
this[offset + 2] = (value >>> 8);
|
|
this[offset + 3] = (value & 0xff);
|
|
} else {
|
|
objectWriteUInt32(this, value, offset, false);
|
|
}
|
|
return offset + 4
|
|
};
|
|
|
|
Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) {
|
|
value = +value;
|
|
offset = offset | 0;
|
|
if (!noAssert) {
|
|
var limit = Math.pow(2, 8 * byteLength - 1);
|
|
|
|
checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
|
}
|
|
|
|
var i = 0;
|
|
var mul = 1;
|
|
var sub = 0;
|
|
this[offset] = value & 0xFF;
|
|
while (++i < byteLength && (mul *= 0x100)) {
|
|
if (value < 0 && sub === 0 && this[offset + i - 1] !== 0) {
|
|
sub = 1;
|
|
}
|
|
this[offset + i] = ((value / mul) >> 0) - sub & 0xFF;
|
|
}
|
|
|
|
return offset + byteLength
|
|
};
|
|
|
|
Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) {
|
|
value = +value;
|
|
offset = offset | 0;
|
|
if (!noAssert) {
|
|
var limit = Math.pow(2, 8 * byteLength - 1);
|
|
|
|
checkInt(this, value, offset, byteLength, limit - 1, -limit);
|
|
}
|
|
|
|
var i = byteLength - 1;
|
|
var mul = 1;
|
|
var sub = 0;
|
|
this[offset + i] = value & 0xFF;
|
|
while (--i >= 0 && (mul *= 0x100)) {
|
|
if (value < 0 && sub === 0 && this[offset + i + 1] !== 0) {
|
|
sub = 1;
|
|
}
|
|
this[offset + i] = ((value / mul) >> 0) - sub & 0xFF;
|
|
}
|
|
|
|
return offset + byteLength
|
|
};
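// Illustrative example (added for clarity, not in the upstream source): the
// signed writers carry a borrow (`sub`) once a non-zero byte has been emitted,
// which yields the two's-complement encoding for negative values:
//
//   var b = new Buffer(2);
//   b.writeIntLE(-2, 0, 2);   // bytes: [0xFE, 0xFF]  (0xFFFE, little-endian)
//   b.writeIntBE(-2, 0, 2);   // bytes: [0xFF, 0xFE]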
|
|
|
|
Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) {
|
|
value = +value;
|
|
offset = offset | 0;
|
|
if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80);
|
|
if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value);
|
|
if (value < 0) value = 0xff + value + 1;
|
|
this[offset] = (value & 0xff);
|
|
return offset + 1
|
|
};
|
|
|
|
Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) {
|
|
value = +value;
|
|
offset = offset | 0;
|
|
if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000);
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
this[offset] = (value & 0xff);
|
|
this[offset + 1] = (value >>> 8);
|
|
} else {
|
|
objectWriteUInt16(this, value, offset, true);
|
|
}
|
|
return offset + 2
|
|
};
|
|
|
|
Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) {
|
|
value = +value;
|
|
offset = offset | 0;
|
|
if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000);
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
this[offset] = (value >>> 8);
|
|
this[offset + 1] = (value & 0xff);
|
|
} else {
|
|
objectWriteUInt16(this, value, offset, false);
|
|
}
|
|
return offset + 2
|
|
};
|
|
|
|
Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) {
|
|
value = +value;
|
|
offset = offset | 0;
|
|
if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000);
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
this[offset] = (value & 0xff);
|
|
this[offset + 1] = (value >>> 8);
|
|
this[offset + 2] = (value >>> 16);
|
|
this[offset + 3] = (value >>> 24);
|
|
} else {
|
|
objectWriteUInt32(this, value, offset, true);
|
|
}
|
|
return offset + 4
|
|
};
|
|
|
|
Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) {
|
|
value = +value;
|
|
offset = offset | 0;
|
|
if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000);
|
|
if (value < 0) value = 0xffffffff + value + 1;
|
|
if (Buffer.TYPED_ARRAY_SUPPORT) {
|
|
this[offset] = (value >>> 24);
|
|
this[offset + 1] = (value >>> 16);
|
|
this[offset + 2] = (value >>> 8);
|
|
this[offset + 3] = (value & 0xff);
|
|
} else {
|
|
objectWriteUInt32(this, value, offset, false);
|
|
}
|
|
return offset + 4
|
|
};
|
|
|
|
function checkIEEE754 (buf, value, offset, ext, max, min) {
|
|
if (offset + ext > buf.length) throw new RangeError('Index out of range')
|
|
if (offset < 0) throw new RangeError('Index out of range')
|
|
}
|
|
|
|
function writeFloat (buf, value, offset, littleEndian, noAssert) {
|
|
if (!noAssert) {
|
|
checkIEEE754(buf, value, offset, 4);
|
|
}
|
|
write(buf, value, offset, littleEndian, 23, 4);
|
|
return offset + 4
|
|
}
|
|
|
|
Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) {
|
|
return writeFloat(this, value, offset, true, noAssert)
|
|
};
|
|
|
|
Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) {
|
|
return writeFloat(this, value, offset, false, noAssert)
|
|
};
|
|
|
|
function writeDouble (buf, value, offset, littleEndian, noAssert) {
|
|
if (!noAssert) {
|
|
checkIEEE754(buf, value, offset, 8);
|
|
}
|
|
write(buf, value, offset, littleEndian, 52, 8);
|
|
return offset + 8
|
|
}
|
|
|
|
Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) {
|
|
return writeDouble(this, value, offset, true, noAssert)
|
|
};
|
|
|
|
Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) {
|
|
return writeDouble(this, value, offset, false, noAssert)
|
|
};
|
|
|
|
// copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length)
|
|
Buffer.prototype.copy = function copy (target, targetStart, start, end) {
|
|
if (!start) start = 0;
|
|
if (!end && end !== 0) end = this.length;
|
|
if (targetStart >= target.length) targetStart = target.length;
|
|
if (!targetStart) targetStart = 0;
|
|
if (end > 0 && end < start) end = start;
|
|
|
|
// Copy 0 bytes; we're done
|
|
if (end === start) return 0
|
|
if (target.length === 0 || this.length === 0) return 0
|
|
|
|
// Fatal error conditions
|
|
if (targetStart < 0) {
|
|
throw new RangeError('targetStart out of bounds')
|
|
}
|
|
if (start < 0 || start >= this.length) throw new RangeError('sourceStart out of bounds')
|
|
if (end < 0) throw new RangeError('sourceEnd out of bounds')
|
|
|
|
// Are we oob?
|
|
if (end > this.length) end = this.length;
|
|
if (target.length - targetStart < end - start) {
|
|
end = target.length - targetStart + start;
|
|
}
|
|
|
|
var len = end - start;
|
|
var i;
|
|
|
|
if (this === target && start < targetStart && targetStart < end) {
|
|
// descending copy from end
|
|
for (i = len - 1; i >= 0; --i) {
|
|
target[i + targetStart] = this[i + start];
|
|
}
|
|
} else if (len < 1000 || !Buffer.TYPED_ARRAY_SUPPORT) {
|
|
// ascending copy from start
|
|
for (i = 0; i < len; ++i) {
|
|
target[i + targetStart] = this[i + start];
|
|
}
|
|
} else {
|
|
Uint8Array.prototype.set.call(
|
|
target,
|
|
this.subarray(start, start + len),
|
|
targetStart
|
|
);
|
|
}
|
|
|
|
return len
|
|
};
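// Illustrative example (not from the original source): copy() clamps the range
// to what fits in the target and copies backwards when the source range
// overlaps a later target range in the same buffer, so in-place shifts are safe:
//
//   var b = new Buffer([1, 2, 3, 4, 5]);
//   b.copy(b, 1, 0, 4);   // returns 4; b is now [1, 1, 2, 3, 4]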
|
|
|
|
// Usage:
|
|
// buffer.fill(number[, offset[, end]])
|
|
// buffer.fill(buffer[, offset[, end]])
|
|
// buffer.fill(string[, offset[, end]][, encoding])
|
|
Buffer.prototype.fill = function fill (val, start, end, encoding) {
|
|
// Handle string cases:
|
|
if (typeof val === 'string') {
|
|
if (typeof start === 'string') {
|
|
encoding = start;
|
|
start = 0;
|
|
end = this.length;
|
|
} else if (typeof end === 'string') {
|
|
encoding = end;
|
|
end = this.length;
|
|
}
|
|
if (val.length === 1) {
|
|
var code = val.charCodeAt(0);
|
|
if (code < 256) {
|
|
val = code;
|
|
}
|
|
}
|
|
if (encoding !== undefined && typeof encoding !== 'string') {
|
|
throw new TypeError('encoding must be a string')
|
|
}
|
|
if (typeof encoding === 'string' && !Buffer.isEncoding(encoding)) {
|
|
throw new TypeError('Unknown encoding: ' + encoding)
|
|
}
|
|
} else if (typeof val === 'number') {
|
|
val = val & 255;
|
|
}
|
|
|
|
// Invalid ranges are not set to a default, so can range check early.
|
|
if (start < 0 || this.length < start || this.length < end) {
|
|
throw new RangeError('Out of range index')
|
|
}
|
|
|
|
if (end <= start) {
|
|
return this
|
|
}
|
|
|
|
start = start >>> 0;
|
|
end = end === undefined ? this.length : end >>> 0;
|
|
|
|
if (!val) val = 0;
|
|
|
|
var i;
|
|
if (typeof val === 'number') {
|
|
for (i = start; i < end; ++i) {
|
|
this[i] = val;
|
|
}
|
|
} else {
|
|
var bytes = internalIsBuffer(val)
|
|
? val
|
|
: utf8ToBytes(new Buffer(val, encoding).toString());
|
|
var len = bytes.length;
|
|
for (i = 0; i < end - start; ++i) {
|
|
this[i + start] = bytes[i % len];
|
|
}
|
|
}
|
|
|
|
return this
|
|
};
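// Illustrative example (not from the original source): fill() accepts a byte
// value, a string (repeated as a byte pattern), or another buffer:
//
//   new Buffer(4).fill(0);      // [0x00, 0x00, 0x00, 0x00]
//   new Buffer(4).fill('ab');   // [0x61, 0x62, 0x61, 0x62]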
|
|
|
|
// HELPER FUNCTIONS
|
|
// ================
|
|
|
|
var INVALID_BASE64_RE = /[^+\/0-9A-Za-z-_]/g;
|
|
|
|
function base64clean (str) {
|
|
// Node strips out invalid characters like \n and \t from the string, base64-js does not
|
|
str = stringtrim(str).replace(INVALID_BASE64_RE, '');
|
|
// Node converts strings with length < 2 to ''
|
|
if (str.length < 2) return ''
|
|
// Node allows for non-padded base64 strings (missing trailing ===), base64-js does not
|
|
while (str.length % 4 !== 0) {
|
|
str = str + '=';
|
|
}
|
|
return str
|
|
}
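// Illustrative example (added for clarity, not in the upstream source): stray
// whitespace and missing '=' padding are normalised before base64 decoding:
//
//   base64clean(' QUJ  ');   // -> 'QUJ='
//   base64clean('Q');        // -> ''   (strings shorter than 2 become empty)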
|
|
|
|
function stringtrim (str) {
|
|
if (str.trim) return str.trim()
|
|
return str.replace(/^\s+|\s+$/g, '')
|
|
}
|
|
|
|
function toHex (n) {
|
|
if (n < 16) return '0' + n.toString(16)
|
|
return n.toString(16)
|
|
}
|
|
|
|
function utf8ToBytes (string, units) {
|
|
units = units || Infinity;
|
|
var codePoint;
|
|
var length = string.length;
|
|
var leadSurrogate = null;
|
|
var bytes = [];
|
|
|
|
for (var i = 0; i < length; ++i) {
|
|
codePoint = string.charCodeAt(i);
|
|
|
|
// is surrogate component
|
|
if (codePoint > 0xD7FF && codePoint < 0xE000) {
|
|
// last char was a lead
|
|
if (!leadSurrogate) {
|
|
// no lead yet
|
|
if (codePoint > 0xDBFF) {
|
|
// unexpected trail
|
|
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD);
|
|
continue
|
|
} else if (i + 1 === length) {
|
|
// unpaired lead
|
|
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD);
|
|
continue
|
|
}
|
|
|
|
// valid lead
|
|
leadSurrogate = codePoint;
|
|
|
|
continue
|
|
}
|
|
|
|
// 2 leads in a row
|
|
if (codePoint < 0xDC00) {
|
|
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD);
|
|
leadSurrogate = codePoint;
|
|
continue
|
|
}
|
|
|
|
// valid surrogate pair
|
|
codePoint = (leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00) + 0x10000;
|
|
} else if (leadSurrogate) {
|
|
// valid bmp char, but last char was a lead
|
|
if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD);
|
|
}
|
|
|
|
leadSurrogate = null;
|
|
|
|
// encode utf8
|
|
if (codePoint < 0x80) {
|
|
if ((units -= 1) < 0) break
|
|
bytes.push(codePoint);
|
|
} else if (codePoint < 0x800) {
|
|
if ((units -= 2) < 0) break
|
|
bytes.push(
|
|
codePoint >> 0x6 | 0xC0,
|
|
codePoint & 0x3F | 0x80
|
|
);
|
|
} else if (codePoint < 0x10000) {
|
|
if ((units -= 3) < 0) break
|
|
bytes.push(
|
|
codePoint >> 0xC | 0xE0,
|
|
codePoint >> 0x6 & 0x3F | 0x80,
|
|
codePoint & 0x3F | 0x80
|
|
);
|
|
} else if (codePoint < 0x110000) {
|
|
if ((units -= 4) < 0) break
|
|
bytes.push(
|
|
codePoint >> 0x12 | 0xF0,
|
|
codePoint >> 0xC & 0x3F | 0x80,
|
|
codePoint >> 0x6 & 0x3F | 0x80,
|
|
codePoint & 0x3F | 0x80
|
|
);
|
|
} else {
|
|
throw new Error('Invalid code point')
|
|
}
|
|
}
|
|
|
|
return bytes
|
|
}
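// Illustrative example (not from the original source): code points are encoded
// as UTF-8 and unpaired surrogates are replaced with U+FFFD (EF BF BD):
//
//   utf8ToBytes('é');        // -> [0xC3, 0xA9]
//   utf8ToBytes('\uD800x');  // -> [0xEF, 0xBF, 0xBD, 0x78]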
|
|
|
|
function asciiToBytes (str) {
|
|
var byteArray = [];
|
|
for (var i = 0; i < str.length; ++i) {
|
|
// Node's code seems to be doing this and not & 0x7F..
|
|
byteArray.push(str.charCodeAt(i) & 0xFF);
|
|
}
|
|
return byteArray
|
|
}
|
|
|
|
function utf16leToBytes (str, units) {
|
|
var c, hi, lo;
|
|
var byteArray = [];
|
|
for (var i = 0; i < str.length; ++i) {
|
|
if ((units -= 2) < 0) break
|
|
|
|
c = str.charCodeAt(i);
|
|
hi = c >> 8;
|
|
lo = c % 256;
|
|
byteArray.push(lo);
|
|
byteArray.push(hi);
|
|
}
|
|
|
|
return byteArray
|
|
}
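// Illustrative example (not from the original source): each UTF-16 code unit
// is emitted low byte first:
//
//   utf16leToBytes('A', Infinity);   // -> [0x41, 0x00]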
|
|
|
|
|
|
function base64ToBytes (str) {
|
|
return toByteArray(base64clean(str))
|
|
}
|
|
|
|
function blitBuffer (src, dst, offset, length) {
|
|
for (var i = 0; i < length; ++i) {
|
|
if ((i + offset >= dst.length) || (i >= src.length)) break
|
|
dst[i + offset] = src[i];
|
|
}
|
|
return i
|
|
}
|
|
|
|
function isnan (val) {
|
|
return val !== val // eslint-disable-line no-self-compare
|
|
}
|
|
|
|
|
|
// the following is from is-buffer, also by Feross Aboukhadijeh and with same license
|
|
// The _isBuffer check is for Safari 5-7 support, because it's missing
|
|
// Object.prototype.constructor. Remove this eventually
|
|
function isBuffer(obj) {
|
|
return obj != null && (!!obj._isBuffer || isFastBuffer(obj) || isSlowBuffer(obj))
|
|
}
|
|
|
|
function isFastBuffer (obj) {
|
|
return !!obj.constructor && typeof obj.constructor.isBuffer === 'function' && obj.constructor.isBuffer(obj)
|
|
}
|
|
|
|
// For Node v0.10 support. Remove this eventually.
|
|
function isSlowBuffer (obj) {
|
|
return typeof obj.readFloatLE === 'function' && typeof obj.slice === 'function' && isFastBuffer(obj.slice(0, 0))
|
|
}
|
|
|
|
// shim for using process in browser
|
|
// based on https://github.com/defunctzombie/node-process/blob/master/browser.js
|
|
|
|
function defaultSetTimout() {
|
|
throw new Error('setTimeout has not been defined');
|
|
}
|
|
function defaultClearTimeout () {
|
|
throw new Error('clearTimeout has not been defined');
|
|
}
|
|
var cachedSetTimeout = defaultSetTimout;
|
|
var cachedClearTimeout = defaultClearTimeout;
|
|
if (typeof global$1.setTimeout === 'function') {
|
|
cachedSetTimeout = setTimeout;
|
|
}
|
|
if (typeof global$1.clearTimeout === 'function') {
|
|
cachedClearTimeout = clearTimeout;
|
|
}
|
|
|
|
function runTimeout(fun) {
|
|
if (cachedSetTimeout === setTimeout) {
|
|
// normal environments in sane situations
|
|
return setTimeout(fun, 0);
|
|
}
|
|
// if setTimeout wasn't available but was later defined
|
|
if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) {
|
|
cachedSetTimeout = setTimeout;
|
|
return setTimeout(fun, 0);
|
|
}
|
|
try {
|
|
// when somebody has screwed with setTimeout but there is no IE madness
|
|
return cachedSetTimeout(fun, 0);
|
|
} catch(e){
|
|
try {
|
|
// When we are in IE but the script has been evaled, IE doesn't trust the global object when called normally
|
|
return cachedSetTimeout.call(null, fun, 0);
|
|
} catch(e){
|
|
// same as above, but for a version of IE that requires the global object as 'this'; hopefully our context is correct, otherwise it will throw a global error
|
|
return cachedSetTimeout.call(this, fun, 0);
|
|
}
|
|
}
|
|
|
|
|
|
}
|
|
function runClearTimeout(marker) {
|
|
if (cachedClearTimeout === clearTimeout) {
|
|
// normal environments in sane situations
|
|
return clearTimeout(marker);
|
|
}
|
|
// if clearTimeout wasn't available but was later defined
|
|
if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) {
|
|
cachedClearTimeout = clearTimeout;
|
|
return clearTimeout(marker);
|
|
}
|
|
try {
|
|
// when somebody has screwed with clearTimeout but there is no IE madness
|
|
return cachedClearTimeout(marker);
|
|
} catch (e){
|
|
try {
|
|
// When we are in IE but the script has been evaled, IE doesn't trust the global object when called normally
|
|
return cachedClearTimeout.call(null, marker);
|
|
} catch (e){
|
|
// same as above, but for a version of IE that requires the global object as 'this'; hopefully our context is correct, otherwise it will throw a global error.
|
|
// Some versions of I.E. have different rules for clearTimeout vs setTimeout
|
|
return cachedClearTimeout.call(this, marker);
|
|
}
|
|
}
|
|
|
|
|
|
|
|
}
|
|
var queue = [];
|
|
var draining = false;
|
|
var currentQueue;
|
|
var queueIndex = -1;
|
|
|
|
function cleanUpNextTick() {
|
|
if (!draining || !currentQueue) {
|
|
return;
|
|
}
|
|
draining = false;
|
|
if (currentQueue.length) {
|
|
queue = currentQueue.concat(queue);
|
|
} else {
|
|
queueIndex = -1;
|
|
}
|
|
if (queue.length) {
|
|
drainQueue();
|
|
}
|
|
}
|
|
|
|
function drainQueue() {
|
|
if (draining) {
|
|
return;
|
|
}
|
|
var timeout = runTimeout(cleanUpNextTick);
|
|
draining = true;
|
|
|
|
var len = queue.length;
|
|
while(len) {
|
|
currentQueue = queue;
|
|
queue = [];
|
|
while (++queueIndex < len) {
|
|
if (currentQueue) {
|
|
currentQueue[queueIndex].run();
|
|
}
|
|
}
|
|
queueIndex = -1;
|
|
len = queue.length;
|
|
}
|
|
currentQueue = null;
|
|
draining = false;
|
|
runClearTimeout(timeout);
|
|
}
|
|
function nextTick(fun) {
|
|
var args = new Array(arguments.length - 1);
|
|
if (arguments.length > 1) {
|
|
for (var i = 1; i < arguments.length; i++) {
|
|
args[i - 1] = arguments[i];
|
|
}
|
|
}
|
|
queue.push(new Item(fun, args));
|
|
if (queue.length === 1 && !draining) {
|
|
runTimeout(drainQueue);
|
|
}
|
|
}
|
|
// v8 likes predictable objects
|
|
function Item(fun, array) {
|
|
this.fun = fun;
|
|
this.array = array;
|
|
}
|
|
Item.prototype.run = function () {
|
|
this.fun.apply(null, this.array);
|
|
};
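// Illustrative example (added for clarity, not part of the original source):
// this browser shim queues the callback and schedules a drain via
// setTimeout(fn, 0), so extra arguments are forwarded but execution is
// macrotask-timed rather than a true microtask:
//
//   nextTick(function (a, b) { console.log(a + b); }, 1, 2);   // logs 3 later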
|
|
|
|
// from https://github.com/kumavis/browser-process-hrtime/blob/master/index.js
|
|
var performance = global$1.performance || {};
|
|
var performanceNow =
|
|
performance.now ||
|
|
performance.mozNow ||
|
|
performance.msNow ||
|
|
performance.oNow ||
|
|
performance.webkitNow ||
|
|
function(){ return (new Date()).getTime() };
|
|
|
|
var inherits;
|
|
if (typeof Object.create === 'function'){
|
|
inherits = function inherits(ctor, superCtor) {
|
|
// implementation from standard node.js 'util' module
|
|
ctor.super_ = superCtor;
|
|
ctor.prototype = Object.create(superCtor.prototype, {
|
|
constructor: {
|
|
value: ctor,
|
|
enumerable: false,
|
|
writable: true,
|
|
configurable: true
|
|
}
|
|
});
|
|
};
|
|
} else {
|
|
inherits = function inherits(ctor, superCtor) {
|
|
ctor.super_ = superCtor;
|
|
var TempCtor = function () {};
|
|
TempCtor.prototype = superCtor.prototype;
|
|
ctor.prototype = new TempCtor();
|
|
ctor.prototype.constructor = ctor;
|
|
};
|
|
}
|
|
var inherits$1 = inherits;
|
|
|
|
var formatRegExp = /%[sdj%]/g;
|
|
function format(f) {
|
|
if (!isString(f)) {
|
|
var objects = [];
|
|
for (var i = 0; i < arguments.length; i++) {
|
|
objects.push(inspect(arguments[i]));
|
|
}
|
|
return objects.join(' ');
|
|
}
|
|
|
|
var i = 1;
|
|
var args = arguments;
|
|
var len = args.length;
|
|
var str = String(f).replace(formatRegExp, function(x) {
|
|
if (x === '%%') return '%';
|
|
if (i >= len) return x;
|
|
switch (x) {
|
|
case '%s': return String(args[i++]);
|
|
case '%d': return Number(args[i++]);
|
|
case '%j':
|
|
try {
|
|
return JSON.stringify(args[i++]);
|
|
} catch (_) {
|
|
return '[Circular]';
|
|
}
|
|
default:
|
|
return x;
|
|
}
|
|
});
|
|
for (var x = args[i]; i < len; x = args[++i]) {
|
|
if (isNull(x) || !isObject(x)) {
|
|
str += ' ' + x;
|
|
} else {
|
|
str += ' ' + inspect(x);
|
|
}
|
|
}
|
|
return str;
|
|
}
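// Illustrative example (not from the original source): printf-style
// placeholders are substituted in order and any extra arguments are appended:
//
//   format('%s is %d years old', 'Ada', 36);   // -> 'Ada is 36 years old'
//   format('%j', { a: 1 });                    // -> '{"a":1}'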
|
|
|
|
// Mark that a method should not be used.
|
|
// Returns a modified function which warns once by default.
|
|
// If --no-deprecation is set, then it is a no-op.
|
|
function deprecate(fn, msg) {
|
|
// Allow for deprecating things in the process of starting up.
|
|
if (isUndefined(global$1.process)) {
|
|
return function() {
|
|
return deprecate(fn, msg).apply(this, arguments);
|
|
};
|
|
}
|
|
|
|
var warned = false;
|
|
function deprecated() {
|
|
if (!warned) {
|
|
{
|
|
console.error(msg);
|
|
}
|
|
warned = true;
|
|
}
|
|
return fn.apply(this, arguments);
|
|
}
|
|
|
|
return deprecated;
|
|
}
|
|
|
|
var debugs = {};
|
|
var debugEnviron;
|
|
function debuglog(set) {
|
|
if (isUndefined(debugEnviron))
|
|
debugEnviron = '';
|
|
set = set.toUpperCase();
|
|
if (!debugs[set]) {
|
|
if (new RegExp('\\b' + set + '\\b', 'i').test(debugEnviron)) {
|
|
var pid = 0;
|
|
debugs[set] = function() {
|
|
var msg = format.apply(null, arguments);
|
|
console.error('%s %d: %s', set, pid, msg);
|
|
};
|
|
} else {
|
|
debugs[set] = function() {};
|
|
}
|
|
}
|
|
return debugs[set];
|
|
}
|
|
|
|
/**
|
|
* Echoes the value of a value. Tries to print the value out
|
|
* in the best way possible given the different types.
|
|
*
|
|
* @param {Object} obj The object to print out.
|
|
* @param {Object} opts Optional options object that alters the output.
|
|
*/
|
|
/* legacy: obj, showHidden, depth, colors*/
|
|
function inspect(obj, opts) {
|
|
// default options
|
|
var ctx = {
|
|
seen: [],
|
|
stylize: stylizeNoColor
|
|
};
|
|
// legacy...
|
|
if (arguments.length >= 3) ctx.depth = arguments[2];
|
|
if (arguments.length >= 4) ctx.colors = arguments[3];
|
|
if (isBoolean(opts)) {
|
|
// legacy...
|
|
ctx.showHidden = opts;
|
|
} else if (opts) {
|
|
// got an "options" object
|
|
_extend(ctx, opts);
|
|
}
|
|
// set default options
|
|
if (isUndefined(ctx.showHidden)) ctx.showHidden = false;
|
|
if (isUndefined(ctx.depth)) ctx.depth = 2;
|
|
if (isUndefined(ctx.colors)) ctx.colors = false;
|
|
if (isUndefined(ctx.customInspect)) ctx.customInspect = true;
|
|
if (ctx.colors) ctx.stylize = stylizeWithColor;
|
|
return formatValue(ctx, obj, ctx.depth);
|
|
}
|
|
|
|
// http://en.wikipedia.org/wiki/ANSI_escape_code#graphics
|
|
inspect.colors = {
|
|
'bold' : [1, 22],
|
|
'italic' : [3, 23],
|
|
'underline' : [4, 24],
|
|
'inverse' : [7, 27],
|
|
'white' : [37, 39],
|
|
'grey' : [90, 39],
|
|
'black' : [30, 39],
|
|
'blue' : [34, 39],
|
|
'cyan' : [36, 39],
|
|
'green' : [32, 39],
|
|
'magenta' : [35, 39],
|
|
'red' : [31, 39],
|
|
'yellow' : [33, 39]
|
|
};
|
|
|
|
// Don't use 'blue' not visible on cmd.exe
|
|
inspect.styles = {
|
|
'special': 'cyan',
|
|
'number': 'yellow',
|
|
'boolean': 'yellow',
|
|
'undefined': 'grey',
|
|
'null': 'bold',
|
|
'string': 'green',
|
|
'date': 'magenta',
|
|
// "name": intentionally not styling
|
|
'regexp': 'red'
|
|
};
|
|
|
|
|
|
function stylizeWithColor(str, styleType) {
|
|
var style = inspect.styles[styleType];
|
|
|
|
if (style) {
|
|
return '\u001b[' + inspect.colors[style][0] + 'm' + str +
|
|
'\u001b[' + inspect.colors[style][1] + 'm';
|
|
} else {
|
|
return str;
|
|
}
|
|
}
|
|
|
|
|
|
function stylizeNoColor(str, styleType) {
|
|
return str;
|
|
}
|
|
|
|
|
|
function arrayToHash(array) {
|
|
var hash = {};
|
|
|
|
array.forEach(function(val, idx) {
|
|
hash[val] = true;
|
|
});
|
|
|
|
return hash;
|
|
}
|
|
|
|
|
|
function formatValue(ctx, value, recurseTimes) {
|
|
// Provide a hook for user-specified inspect functions.
|
|
// Check that value is an object with an inspect function on it
|
|
if (ctx.customInspect &&
|
|
value &&
|
|
isFunction(value.inspect) &&
|
|
// Filter out the util module, its inspect function is special
|
|
value.inspect !== inspect &&
|
|
// Also filter out any prototype objects using the circular check.
|
|
!(value.constructor && value.constructor.prototype === value)) {
|
|
var ret = value.inspect(recurseTimes, ctx);
|
|
if (!isString(ret)) {
|
|
ret = formatValue(ctx, ret, recurseTimes);
|
|
}
|
|
return ret;
|
|
}
|
|
|
|
// Primitive types cannot have properties
|
|
var primitive = formatPrimitive(ctx, value);
|
|
if (primitive) {
|
|
return primitive;
|
|
}
|
|
|
|
// Look up the keys of the object.
|
|
var keys = Object.keys(value);
|
|
var visibleKeys = arrayToHash(keys);
|
|
|
|
if (ctx.showHidden) {
|
|
keys = Object.getOwnPropertyNames(value);
|
|
}
|
|
|
|
// IE doesn't make error fields non-enumerable
|
|
// http://msdn.microsoft.com/en-us/library/ie/dww52sbt(v=vs.94).aspx
|
|
if (isError(value)
|
|
&& (keys.indexOf('message') >= 0 || keys.indexOf('description') >= 0)) {
|
|
return formatError(value);
|
|
}
|
|
|
|
// Some types of objects without properties can be shortcut.
|
|
if (keys.length === 0) {
|
|
if (isFunction(value)) {
|
|
var name = value.name ? ': ' + value.name : '';
|
|
return ctx.stylize('[Function' + name + ']', 'special');
|
|
}
|
|
if (isRegExp(value)) {
|
|
return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
|
|
}
|
|
if (isDate(value)) {
|
|
return ctx.stylize(Date.prototype.toString.call(value), 'date');
|
|
}
|
|
if (isError(value)) {
|
|
return formatError(value);
|
|
}
|
|
}
|
|
|
|
var base = '', array = false, braces = ['{', '}'];
|
|
|
|
// Make arrays say that they are arrays
|
|
if (isArray$1(value)) {
|
|
array = true;
|
|
braces = ['[', ']'];
|
|
}
|
|
|
|
// Make functions say that they are functions
|
|
if (isFunction(value)) {
|
|
var n = value.name ? ': ' + value.name : '';
|
|
base = ' [Function' + n + ']';
|
|
}
|
|
|
|
// Make RegExps say that they are RegExps
|
|
if (isRegExp(value)) {
|
|
base = ' ' + RegExp.prototype.toString.call(value);
|
|
}
|
|
|
|
// Make dates with properties first say the date
|
|
if (isDate(value)) {
|
|
base = ' ' + Date.prototype.toUTCString.call(value);
|
|
}
|
|
|
|
// Make error with message first say the error
|
|
if (isError(value)) {
|
|
base = ' ' + formatError(value);
|
|
}
|
|
|
|
if (keys.length === 0 && (!array || value.length == 0)) {
|
|
return braces[0] + base + braces[1];
|
|
}
|
|
|
|
if (recurseTimes < 0) {
|
|
if (isRegExp(value)) {
|
|
return ctx.stylize(RegExp.prototype.toString.call(value), 'regexp');
|
|
} else {
|
|
return ctx.stylize('[Object]', 'special');
|
|
}
|
|
}
|
|
|
|
ctx.seen.push(value);
|
|
|
|
var output;
|
|
if (array) {
|
|
output = formatArray(ctx, value, recurseTimes, visibleKeys, keys);
|
|
} else {
|
|
output = keys.map(function(key) {
|
|
return formatProperty(ctx, value, recurseTimes, visibleKeys, key, array);
|
|
});
|
|
}
|
|
|
|
ctx.seen.pop();
|
|
|
|
return reduceToSingleString(output, base, braces);
|
|
}
|
|
|
|
|
|
function formatPrimitive(ctx, value) {
|
|
if (isUndefined(value))
|
|
return ctx.stylize('undefined', 'undefined');
|
|
if (isString(value)) {
|
|
var simple = '\'' + JSON.stringify(value).replace(/^"|"$/g, '')
|
|
.replace(/'/g, "\\'")
|
|
.replace(/\\"/g, '"') + '\'';
|
|
return ctx.stylize(simple, 'string');
|
|
}
|
|
if (isNumber(value))
|
|
return ctx.stylize('' + value, 'number');
|
|
if (isBoolean(value))
|
|
return ctx.stylize('' + value, 'boolean');
|
|
// For some reason typeof null is "object", so special case here.
|
|
if (isNull(value))
|
|
return ctx.stylize('null', 'null');
|
|
}
|
|
|
|
|
|
function formatError(value) {
|
|
return '[' + Error.prototype.toString.call(value) + ']';
|
|
}
|
|
|
|
|
|
function formatArray(ctx, value, recurseTimes, visibleKeys, keys) {
|
|
var output = [];
|
|
for (var i = 0, l = value.length; i < l; ++i) {
|
|
if (hasOwnProperty(value, String(i))) {
|
|
output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
|
|
String(i), true));
|
|
} else {
|
|
output.push('');
|
|
}
|
|
}
|
|
keys.forEach(function(key) {
|
|
if (!key.match(/^\d+$/)) {
|
|
output.push(formatProperty(ctx, value, recurseTimes, visibleKeys,
|
|
key, true));
|
|
}
|
|
});
|
|
return output;
|
|
}
|
|
|
|
|
|
function formatProperty(ctx, value, recurseTimes, visibleKeys, key, array) {
|
|
var name, str, desc;
|
|
desc = Object.getOwnPropertyDescriptor(value, key) || { value: value[key] };
|
|
if (desc.get) {
|
|
if (desc.set) {
|
|
str = ctx.stylize('[Getter/Setter]', 'special');
|
|
} else {
|
|
str = ctx.stylize('[Getter]', 'special');
|
|
}
|
|
} else {
|
|
if (desc.set) {
|
|
str = ctx.stylize('[Setter]', 'special');
|
|
}
|
|
}
|
|
if (!hasOwnProperty(visibleKeys, key)) {
|
|
name = '[' + key + ']';
|
|
}
|
|
if (!str) {
|
|
if (ctx.seen.indexOf(desc.value) < 0) {
|
|
if (isNull(recurseTimes)) {
|
|
str = formatValue(ctx, desc.value, null);
|
|
} else {
|
|
str = formatValue(ctx, desc.value, recurseTimes - 1);
|
|
}
|
|
if (str.indexOf('\n') > -1) {
|
|
if (array) {
|
|
str = str.split('\n').map(function(line) {
|
|
return ' ' + line;
|
|
}).join('\n').substr(2);
|
|
} else {
|
|
str = '\n' + str.split('\n').map(function(line) {
|
|
return ' ' + line;
|
|
}).join('\n');
|
|
}
|
|
}
|
|
} else {
|
|
str = ctx.stylize('[Circular]', 'special');
|
|
}
|
|
}
|
|
if (isUndefined(name)) {
|
|
if (array && key.match(/^\d+$/)) {
|
|
return str;
|
|
}
|
|
name = JSON.stringify('' + key);
|
|
if (name.match(/^"([a-zA-Z_][a-zA-Z_0-9]*)"$/)) {
|
|
name = name.substr(1, name.length - 2);
|
|
name = ctx.stylize(name, 'name');
|
|
} else {
|
|
name = name.replace(/'/g, "\\'")
|
|
.replace(/\\"/g, '"')
|
|
.replace(/(^"|"$)/g, "'");
|
|
name = ctx.stylize(name, 'string');
|
|
}
|
|
}
|
|
|
|
return name + ': ' + str;
|
|
}
|
|
|
|
|
|
function reduceToSingleString(output, base, braces) {
|
|
var length = output.reduce(function(prev, cur) {
|
|
if (cur.indexOf('\n') >= 0) ;
|
|
return prev + cur.replace(/\u001b\[\d\d?m/g, '').length + 1;
|
|
}, 0);
|
|
|
|
if (length > 60) {
|
|
return braces[0] +
|
|
(base === '' ? '' : base + '\n ') +
|
|
' ' +
|
|
output.join(',\n ') +
|
|
' ' +
|
|
braces[1];
|
|
}
|
|
|
|
return braces[0] + base + ' ' + output.join(', ') + ' ' + braces[1];
|
|
}
|
|
|
|
|
|
// NOTE: These type checking functions intentionally don't use `instanceof`
|
|
// because it is fragile and can be easily faked with `Object.create()`.
|
|
function isArray$1(ar) {
|
|
return Array.isArray(ar);
|
|
}
|
|
|
|
function isBoolean(arg) {
|
|
return typeof arg === 'boolean';
|
|
}
|
|
|
|
function isNull(arg) {
|
|
return arg === null;
|
|
}
|
|
|
|
function isNumber(arg) {
|
|
return typeof arg === 'number';
|
|
}
|
|
|
|
function isString(arg) {
|
|
return typeof arg === 'string';
|
|
}
|
|
|
|
function isUndefined(arg) {
|
|
return arg === void 0;
|
|
}
|
|
|
|
function isRegExp(re) {
|
|
return isObject(re) && objectToString(re) === '[object RegExp]';
|
|
}
|
|
|
|
function isObject(arg) {
|
|
return typeof arg === 'object' && arg !== null;
|
|
}
|
|
|
|
function isDate(d) {
|
|
return isObject(d) && objectToString(d) === '[object Date]';
|
|
}
|
|
|
|
function isError(e) {
|
|
return isObject(e) &&
|
|
(objectToString(e) === '[object Error]' || e instanceof Error);
|
|
}
|
|
|
|
function isFunction(arg) {
|
|
return typeof arg === 'function';
|
|
}
|
|
|
|
function objectToString(o) {
|
|
return Object.prototype.toString.call(o);
|
|
}
|
|
|
|
function _extend(origin, add) {
|
|
// Don't do anything if add isn't an object
|
|
if (!add || !isObject(add)) return origin;
|
|
|
|
var keys = Object.keys(add);
|
|
var i = keys.length;
|
|
while (i--) {
|
|
origin[keys[i]] = add[keys[i]];
|
|
}
|
|
return origin;
|
|
}
|
|
function hasOwnProperty(obj, prop) {
|
|
return Object.prototype.hasOwnProperty.call(obj, prop);
|
|
}
|
|
|
|
function BufferList() {
|
|
this.head = null;
|
|
this.tail = null;
|
|
this.length = 0;
|
|
}
|
|
|
|
BufferList.prototype.push = function (v) {
|
|
var entry = { data: v, next: null };
|
|
if (this.length > 0) this.tail.next = entry;else this.head = entry;
|
|
this.tail = entry;
|
|
++this.length;
|
|
};
|
|
|
|
BufferList.prototype.unshift = function (v) {
|
|
var entry = { data: v, next: this.head };
|
|
if (this.length === 0) this.tail = entry;
|
|
this.head = entry;
|
|
++this.length;
|
|
};
|
|
|
|
BufferList.prototype.shift = function () {
|
|
if (this.length === 0) return;
|
|
var ret = this.head.data;
|
|
if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
|
|
--this.length;
|
|
return ret;
|
|
};
|
|
|
|
BufferList.prototype.clear = function () {
|
|
this.head = this.tail = null;
|
|
this.length = 0;
|
|
};
|
|
|
|
BufferList.prototype.join = function (s) {
|
|
if (this.length === 0) return '';
|
|
var p = this.head;
|
|
var ret = '' + p.data;
|
|
while (p = p.next) {
|
|
ret += s + p.data;
|
|
}return ret;
|
|
};
|
|
|
|
BufferList.prototype.concat = function (n) {
|
|
if (this.length === 0) return Buffer.alloc(0);
|
|
if (this.length === 1) return this.head.data;
|
|
var ret = Buffer.allocUnsafe(n >>> 0);
|
|
var p = this.head;
|
|
var i = 0;
|
|
while (p) {
|
|
p.data.copy(ret, i);
|
|
i += p.data.length;
|
|
p = p.next;
|
|
}
|
|
return ret;
|
|
};
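// Illustrative example (not from the original source): this linked list backs
// ReadableState's buffer and concatenates its chunks into one buffer of the
// requested size:
//
//   var list = new BufferList();
//   list.push(Buffer.from('ab'));
//   list.push(Buffer.from('cd'));
//   list.concat(4).toString();   // -> 'abcd'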
|
|
|
|
// Copyright Joyent, Inc. and other Node contributors.
|
|
var isBufferEncoding = Buffer.isEncoding
|
|
|| function(encoding) {
|
|
switch (encoding && encoding.toLowerCase()) {
|
|
case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true;
|
|
default: return false;
|
|
}
|
|
};
|
|
|
|
|
|
function assertEncoding(encoding) {
|
|
if (encoding && !isBufferEncoding(encoding)) {
|
|
throw new Error('Unknown encoding: ' + encoding);
|
|
}
|
|
}
|
|
|
|
// StringDecoder provides an interface for efficiently splitting a series of
|
|
// buffers into a series of JS strings without breaking apart multi-byte
|
|
// characters. CESU-8 is handled as part of the UTF-8 encoding.
|
|
//
|
|
// @TODO Handling all encodings inside a single object makes it very difficult
|
|
// to reason about this code, so it should be split up in the future.
|
|
// @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code
|
|
// points as used by CESU-8.
|
|
function StringDecoder(encoding) {
|
|
this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, '');
|
|
assertEncoding(encoding);
|
|
switch (this.encoding) {
|
|
case 'utf8':
|
|
// CESU-8 represents each of Surrogate Pair by 3-bytes
|
|
this.surrogateSize = 3;
|
|
break;
|
|
case 'ucs2':
|
|
case 'utf16le':
|
|
// UTF-16 represents each of Surrogate Pair by 2-bytes
|
|
this.surrogateSize = 2;
|
|
this.detectIncompleteChar = utf16DetectIncompleteChar;
|
|
break;
|
|
case 'base64':
|
|
// Base-64 stores 3 bytes in 4 chars, and pads the remainder.
|
|
this.surrogateSize = 3;
|
|
this.detectIncompleteChar = base64DetectIncompleteChar;
|
|
break;
|
|
default:
|
|
this.write = passThroughWrite;
|
|
return;
|
|
}
|
|
|
|
// Enough space to store all bytes of a single character. UTF-8 needs 4
|
|
// bytes, but CESU-8 may require up to 6 (3 bytes per surrogate).
|
|
this.charBuffer = new Buffer(6);
|
|
// Number of bytes received for the current incomplete multi-byte character.
|
|
this.charReceived = 0;
|
|
// Number of bytes expected for the current incomplete multi-byte character.
|
|
this.charLength = 0;
|
|
}
|
|
|
|
// write decodes the given buffer and returns it as JS string that is
|
|
// guaranteed to not contain any partial multi-byte characters. Any partial
|
|
// character found at the end of the buffer is buffered up, and will be
|
|
// returned when calling write again with the remaining bytes.
|
|
//
|
|
// Note: Converting a Buffer containing an orphan surrogate to a String
|
|
// currently works, but converting a String to a Buffer (via `new Buffer`, or
|
|
// Buffer#write) will replace incomplete surrogates with the unicode
|
|
// replacement character. See https://codereview.chromium.org/121173009/ .
|
|
StringDecoder.prototype.write = function(buffer) {
|
|
var charStr = '';
|
|
// if our last write ended with an incomplete multibyte character
|
|
while (this.charLength) {
|
|
// determine how many remaining bytes this buffer has to offer for this char
|
|
var available = (buffer.length >= this.charLength - this.charReceived) ?
|
|
this.charLength - this.charReceived :
|
|
buffer.length;
|
|
|
|
// add the new bytes to the char buffer
|
|
buffer.copy(this.charBuffer, this.charReceived, 0, available);
|
|
this.charReceived += available;
|
|
|
|
if (this.charReceived < this.charLength) {
|
|
// still not enough chars in this buffer? wait for more ...
|
|
return '';
|
|
}
|
|
|
|
// remove bytes belonging to the current character from the buffer
|
|
buffer = buffer.slice(available, buffer.length);
|
|
|
|
// get the character that was split
|
|
charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding);
|
|
|
|
// CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
|
|
var charCode = charStr.charCodeAt(charStr.length - 1);
|
|
if (charCode >= 0xD800 && charCode <= 0xDBFF) {
|
|
this.charLength += this.surrogateSize;
|
|
charStr = '';
|
|
continue;
|
|
}
|
|
this.charReceived = this.charLength = 0;
|
|
|
|
// if there are no more bytes in this buffer, just emit our char
|
|
if (buffer.length === 0) {
|
|
return charStr;
|
|
}
|
|
break;
|
|
}
|
|
|
|
// determine and set charLength / charReceived
|
|
this.detectIncompleteChar(buffer);
|
|
|
|
var end = buffer.length;
|
|
if (this.charLength) {
|
|
// buffer the incomplete character bytes we got
|
|
buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end);
|
|
end -= this.charReceived;
|
|
}
|
|
|
|
charStr += buffer.toString(this.encoding, 0, end);
|
|
|
|
var end = charStr.length - 1;
|
|
var charCode = charStr.charCodeAt(end);
|
|
// CESU-8: lead surrogate (D800-DBFF) is also the incomplete character
|
|
if (charCode >= 0xD800 && charCode <= 0xDBFF) {
|
|
var size = this.surrogateSize;
|
|
this.charLength += size;
|
|
this.charReceived += size;
|
|
this.charBuffer.copy(this.charBuffer, size, 0, size);
|
|
buffer.copy(this.charBuffer, 0, 0, size);
|
|
return charStr.substring(0, end);
|
|
}
|
|
|
|
// or just emit the charStr
|
|
return charStr;
|
|
};
|
|
|
|
// detectIncompleteChar determines if there is an incomplete UTF-8 character at
|
|
// the end of the given buffer. If so, it sets this.charLength to the byte
|
|
// length of that character, and sets this.charReceived to the number of bytes
|
|
// that are available for this character.
|
|
StringDecoder.prototype.detectIncompleteChar = function(buffer) {
|
|
// determine how many bytes we have to check at the end of this buffer
|
|
var i = (buffer.length >= 3) ? 3 : buffer.length;
|
|
|
|
// Figure out if one of the last i bytes of our buffer announces an
|
|
// incomplete char.
|
|
for (; i > 0; i--) {
|
|
var c = buffer[buffer.length - i];
|
|
|
|
// See http://en.wikipedia.org/wiki/UTF-8#Description
|
|
|
|
// 110XXXXX
|
|
if (i == 1 && c >> 5 == 0x06) {
|
|
this.charLength = 2;
|
|
break;
|
|
}
|
|
|
|
// 1110XXXX
|
|
if (i <= 2 && c >> 4 == 0x0E) {
|
|
this.charLength = 3;
|
|
break;
|
|
}
|
|
|
|
// 11110XXX
|
|
if (i <= 3 && c >> 3 == 0x1E) {
|
|
this.charLength = 4;
|
|
break;
|
|
}
|
|
}
|
|
this.charReceived = i;
|
|
};
|
|
|
|
StringDecoder.prototype.end = function(buffer) {
|
|
var res = '';
|
|
if (buffer && buffer.length)
|
|
res = this.write(buffer);
|
|
|
|
if (this.charReceived) {
|
|
var cr = this.charReceived;
|
|
var buf = this.charBuffer;
|
|
var enc = this.encoding;
|
|
res += buf.slice(0, cr).toString(enc);
|
|
}
|
|
|
|
return res;
|
|
};
|
|
|
|
function passThroughWrite(buffer) {
|
|
return buffer.toString(this.encoding);
|
|
}
|
|
|
|
function utf16DetectIncompleteChar(buffer) {
|
|
this.charReceived = buffer.length % 2;
|
|
this.charLength = this.charReceived ? 2 : 0;
|
|
}
|
|
|
|
function base64DetectIncompleteChar(buffer) {
|
|
this.charReceived = buffer.length % 3;
|
|
this.charLength = this.charReceived ? 3 : 0;
|
|
}
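// Illustrative example (added for clarity, not in the upstream source): a
// multi-byte character split across two buffers is held back until its
// remaining bytes arrive:
//
//   var d = new StringDecoder('utf8');
//   d.write(new Buffer([0xC3]));   // -> ''   (incomplete character buffered)
//   d.write(new Buffer([0xA9]));   // -> 'é'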
|
|
|
|
Readable.ReadableState = ReadableState;
|
|
|
|
var debug = debuglog('stream');
|
|
inherits$1(Readable, EventEmitter);
|
|
|
|
function prependListener(emitter, event, fn) {
|
|
// Sadly this is not cacheable as some libraries bundle their own
|
|
// event emitter implementation with them.
|
|
if (typeof emitter.prependListener === 'function') {
|
|
return emitter.prependListener(event, fn);
|
|
} else {
|
|
// This is a hack to make sure that our error handler is attached before any
|
|
// userland ones. NEVER DO THIS. This is here only because this code needs
|
|
// to continue to work with older versions of Node.js that do not include
|
|
// the prependListener() method. The goal is to eventually remove this hack.
|
|
if (!emitter._events || !emitter._events[event])
|
|
emitter.on(event, fn);
|
|
else if (Array.isArray(emitter._events[event]))
|
|
emitter._events[event].unshift(fn);
|
|
else
|
|
emitter._events[event] = [fn, emitter._events[event]];
|
|
}
|
|
}
|
|
function listenerCount$1 (emitter, type) {
|
|
return emitter.listeners(type).length;
|
|
}
|
|
function ReadableState(options, stream) {
|
|
|
|
options = options || {};
|
|
|
|
// object stream flag. Used to make read(n) ignore n and to
|
|
// make all the buffer merging and length checks go away
|
|
this.objectMode = !!options.objectMode;
|
|
|
|
if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.readableObjectMode;
|
|
|
|
// the point at which it stops calling _read() to fill the buffer
|
|
// Note: 0 is a valid value, means "don't call _read preemptively ever"
|
|
var hwm = options.highWaterMark;
|
|
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
|
|
this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm;
|
|
|
|
// cast to ints.
|
|
this.highWaterMark = ~ ~this.highWaterMark;
|
|
|
|
// A linked list is used to store data chunks instead of an array because the
|
|
// linked list can remove elements from the beginning faster than
|
|
// array.shift()
|
|
this.buffer = new BufferList();
|
|
this.length = 0;
|
|
this.pipes = null;
|
|
this.pipesCount = 0;
|
|
this.flowing = null;
|
|
this.ended = false;
|
|
this.endEmitted = false;
|
|
this.reading = false;
|
|
|
|
// a flag to be able to tell if the onwrite cb is called immediately,
|
|
// or on a later tick. We set this to true at first, because any
|
|
// actions that shouldn't happen until "later" should generally also
|
|
// not happen before the first write call.
|
|
this.sync = true;
|
|
|
|
// whenever we return null, then we set a flag to say
|
|
// that we're awaiting a 'readable' event emission.
|
|
this.needReadable = false;
|
|
this.emittedReadable = false;
|
|
this.readableListening = false;
|
|
this.resumeScheduled = false;
|
|
|
|
// Crypto is kind of old and crusty. Historically, its default string
|
|
// encoding is 'binary' so we have to make this configurable.
|
|
// Everything else in the universe uses 'utf8', though.
|
|
this.defaultEncoding = options.defaultEncoding || 'utf8';
|
|
|
|
// when piping, we only care about 'readable' events that happen
|
|
// after read()ing all the bytes and not getting any pushback.
|
|
this.ranOut = false;
|
|
|
|
// the number of writers that are awaiting a drain event in .pipe()s
|
|
this.awaitDrain = 0;
|
|
|
|
// if true, a maybeReadMore has been scheduled
|
|
this.readingMore = false;
|
|
|
|
this.decoder = null;
|
|
this.encoding = null;
|
|
if (options.encoding) {
|
|
this.decoder = new StringDecoder(options.encoding);
|
|
this.encoding = options.encoding;
|
|
}
|
|
}
|
|
function Readable(options) {
|
|
|
|
if (!(this instanceof Readable)) return new Readable(options);
|
|
|
|
this._readableState = new ReadableState(options, this);
|
|
|
|
// legacy
|
|
this.readable = true;
|
|
|
|
if (options && typeof options.read === 'function') this._read = options.read;
|
|
|
|
EventEmitter.call(this);
|
|
}
|
|
|
|
// Manually shove something into the read() buffer.
|
|
// This returns true if the highWaterMark has not been hit yet,
|
|
// similar to how Writable.write() returns true if you should
|
|
// write() some more.
|
|
Readable.prototype.push = function (chunk, encoding) {
|
|
var state = this._readableState;
|
|
|
|
if (!state.objectMode && typeof chunk === 'string') {
|
|
encoding = encoding || state.defaultEncoding;
|
|
if (encoding !== state.encoding) {
|
|
chunk = Buffer.from(chunk, encoding);
|
|
encoding = '';
|
|
}
|
|
}
|
|
|
|
return readableAddChunk(this, state, chunk, encoding, false);
|
|
};
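// Illustrative sketch (not part of the original source): a minimal Readable
// that pushes a fixed set of chunks from its _read hook and signals EOF with
// push(null). `chunks` is just an example variable defined here.
//
//   var chunks = ['a', 'b'];
//   var r = new Readable({
//     read: function () {
//       this.push(chunks.shift() || null);
//     }
//   });
//   r.on('data', function (c) { console.log(c.toString()); });   // 'a', then 'b'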
|
|
|
|
// Unshift should *always* be something directly out of read()
|
|
Readable.prototype.unshift = function (chunk) {
|
|
var state = this._readableState;
|
|
return readableAddChunk(this, state, chunk, '', true);
|
|
};
|
|
|
|
Readable.prototype.isPaused = function () {
|
|
return this._readableState.flowing === false;
|
|
};
|
|
|
|
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
|
|
var er = chunkInvalid(state, chunk);
|
|
if (er) {
|
|
stream.emit('error', er);
|
|
} else if (chunk === null) {
|
|
state.reading = false;
|
|
onEofChunk(stream, state);
|
|
} else if (state.objectMode || chunk && chunk.length > 0) {
|
|
if (state.ended && !addToFront) {
|
|
var e = new Error('stream.push() after EOF');
|
|
stream.emit('error', e);
|
|
} else if (state.endEmitted && addToFront) {
|
|
var _e = new Error('stream.unshift() after end event');
|
|
stream.emit('error', _e);
|
|
} else {
|
|
var skipAdd;
|
|
if (state.decoder && !addToFront && !encoding) {
|
|
chunk = state.decoder.write(chunk);
|
|
skipAdd = !state.objectMode && chunk.length === 0;
|
|
}
|
|
|
|
if (!addToFront) state.reading = false;
|
|
|
|
// Don't add to the buffer if we've decoded to an empty string chunk and
|
|
// we're not in object mode
|
|
if (!skipAdd) {
|
|
// if we want the data now, just emit it.
|
|
if (state.flowing && state.length === 0 && !state.sync) {
|
|
stream.emit('data', chunk);
|
|
stream.read(0);
|
|
} else {
|
|
// update the buffer info.
|
|
state.length += state.objectMode ? 1 : chunk.length;
|
|
if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
|
|
|
|
if (state.needReadable) emitReadable(stream);
|
|
}
|
|
}
|
|
|
|
maybeReadMore(stream, state);
|
|
}
|
|
} else if (!addToFront) {
|
|
state.reading = false;
|
|
}
|
|
|
|
return needMoreData(state);
|
|
}
|
|
|
|
// if it's past the high water mark, we can push in some more.
|
|
// Also, if we have no data yet, we can stand some
|
|
// more bytes. This is to work around cases where hwm=0,
|
|
// such as the repl. Also, if the push() triggered a
|
|
// readable event, and the user called read(largeNumber) such that
|
|
// needReadable was set, then we ought to push more, so that another
|
|
// 'readable' event will be triggered.
|
|
function needMoreData(state) {
|
|
return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);
|
|
}
|
|
|
|
// backwards compatibility.
|
|
Readable.prototype.setEncoding = function (enc) {
|
|
this._readableState.decoder = new StringDecoder(enc);
|
|
this._readableState.encoding = enc;
|
|
return this;
|
|
};
|
|
|
|
// Don't raise the hwm > 8MB
|
|
var MAX_HWM = 0x800000;
|
|
function computeNewHighWaterMark(n) {
|
|
if (n >= MAX_HWM) {
|
|
n = MAX_HWM;
|
|
} else {
|
|
// Get the next highest power of 2 to prevent increasing hwm excessively in
|
|
// tiny amounts
|
|
n--;
|
|
n |= n >>> 1;
|
|
n |= n >>> 2;
|
|
n |= n >>> 4;
|
|
n |= n >>> 8;
|
|
n |= n >>> 16;
|
|
n++;
|
|
}
|
|
return n;
|
|
}
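// Illustrative example (not from the original source): requested sizes are
// rounded up to the next power of two, capped at 8MB:
//
//   computeNewHighWaterMark(1000);        // -> 1024
//   computeNewHighWaterMark(0x1000000);   // -> 0x800000 (MAX_HWM)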
|
|
|
|
// This function is designed to be inlinable, so please take care when making
|
|
// changes to the function body.
|
|
function howMuchToRead(n, state) {
|
|
if (n <= 0 || state.length === 0 && state.ended) return 0;
|
|
if (state.objectMode) return 1;
|
|
if (n !== n) {
|
|
// Only flow one buffer at a time
|
|
if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;
|
|
}
|
|
// If we're asking for more than the current hwm, then raise the hwm.
|
|
if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
|
|
if (n <= state.length) return n;
|
|
// Don't have enough
|
|
if (!state.ended) {
|
|
state.needReadable = true;
|
|
return 0;
|
|
}
|
|
return state.length;
|
|
}
|
|
|
|
// you can override either this method, or the async _read(n) below.
|
|
Readable.prototype.read = function (n) {
|
|
debug('read', n);
|
|
n = parseInt(n, 10);
|
|
var state = this._readableState;
|
|
var nOrig = n;
|
|
|
|
if (n !== 0) state.emittedReadable = false;
|
|
|
|
// if we're doing read(0) to trigger a readable event, but we
|
|
// already have a bunch of data in the buffer, then just trigger
|
|
// the 'readable' event and move on.
|
|
if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {
|
|
debug('read: emitReadable', state.length, state.ended);
|
|
if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
|
|
return null;
|
|
}
|
|
|
|
n = howMuchToRead(n, state);
|
|
|
|
// if we've ended, and we're now clear, then finish it up.
|
|
if (n === 0 && state.ended) {
|
|
if (state.length === 0) endReadable(this);
|
|
return null;
|
|
}
|
|
|
|
// All the actual chunk generation logic needs to be
|
|
// *below* the call to _read. The reason is that in certain
|
|
// synthetic stream cases, such as passthrough streams, _read
|
|
// may be a completely synchronous operation which may change
|
|
// the state of the read buffer, providing enough data when
|
|
// before there was *not* enough.
|
|
//
|
|
// So, the steps are:
|
|
// 1. Figure out what the state of things will be after we do
|
|
// a read from the buffer.
|
|
//
|
|
// 2. If that resulting state will trigger a _read, then call _read.
|
|
// Note that this may be asynchronous, or synchronous. Yes, it is
|
|
// deeply ugly to write APIs this way, but that still doesn't mean
|
|
// that the Readable class should behave improperly, as streams are
|
|
// designed to be sync/async agnostic.
|
|
// Take note if the _read call is sync or async (ie, if the read call
|
|
// has returned yet), so that we know whether or not it's safe to emit
|
|
// 'readable' etc.
|
|
//
|
|
// 3. Actually pull the requested chunks out of the buffer and return.
|
|
|
|
// if we need a readable event, then we need to do some reading.
|
|
var doRead = state.needReadable;
|
|
debug('need readable', doRead);
|
|
|
|
// if we currently have less than the highWaterMark, then also read some
|
|
if (state.length === 0 || state.length - n < state.highWaterMark) {
|
|
doRead = true;
|
|
debug('length less than watermark', doRead);
|
|
}
|
|
|
|
// however, if we've ended, then there's no point, and if we're already
|
|
// reading, then it's unnecessary.
|
|
if (state.ended || state.reading) {
|
|
doRead = false;
|
|
debug('reading or ended', doRead);
|
|
} else if (doRead) {
|
|
debug('do read');
|
|
state.reading = true;
|
|
state.sync = true;
|
|
// if the length is currently zero, then we *need* a readable event.
|
|
if (state.length === 0) state.needReadable = true;
|
|
// call internal read method
|
|
this._read(state.highWaterMark);
|
|
state.sync = false;
|
|
// If _read pushed data synchronously, then `reading` will be false,
|
|
// and we need to re-evaluate how much data we can return to the user.
|
|
if (!state.reading) n = howMuchToRead(nOrig, state);
|
|
}
|
|
|
|
var ret;
|
|
if (n > 0) ret = fromList(n, state);else ret = null;
|
|
|
|
if (ret === null) {
|
|
state.needReadable = true;
|
|
n = 0;
|
|
} else {
|
|
state.length -= n;
|
|
}
|
|
|
|
if (state.length === 0) {
|
|
// If we have nothing in the buffer, then we want to know
|
|
// as soon as we *do* get something into the buffer.
|
|
if (!state.ended) state.needReadable = true;
|
|
|
|
// If we tried to read() past the EOF, then emit end on the next tick.
|
|
if (nOrig !== n && state.ended) endReadable(this);
|
|
}
|
|
|
|
if (ret !== null) this.emit('data', ret);
|
|
|
|
return ret;
|
|
};
|
|
|
|
function chunkInvalid(state, chunk) {
|
|
var er = null;
|
|
if (!isBuffer(chunk) && typeof chunk !== 'string' && chunk !== null && chunk !== undefined && !state.objectMode) {
|
|
er = new TypeError('Invalid non-string/buffer chunk');
|
|
}
|
|
return er;
|
|
}
|
|
|
|
function onEofChunk(stream, state) {
|
|
if (state.ended) return;
|
|
if (state.decoder) {
|
|
var chunk = state.decoder.end();
|
|
if (chunk && chunk.length) {
|
|
state.buffer.push(chunk);
|
|
state.length += state.objectMode ? 1 : chunk.length;
|
|
}
|
|
}
|
|
state.ended = true;
|
|
|
|
// emit 'readable' now to make sure it gets picked up.
|
|
emitReadable(stream);
|
|
}
|
|
|
|
// Don't emit readable right away in sync mode, because this can trigger
|
|
// another read() call => stack overflow. This way, it might trigger
|
|
// a nextTick recursion warning, but that's not so bad.
|
|
function emitReadable(stream) {
|
|
var state = stream._readableState;
|
|
state.needReadable = false;
|
|
if (!state.emittedReadable) {
|
|
debug('emitReadable', state.flowing);
|
|
state.emittedReadable = true;
|
|
if (state.sync) nextTick(emitReadable_, stream);else emitReadable_(stream);
|
|
}
|
|
}
|
|
|
|
function emitReadable_(stream) {
|
|
debug('emit readable');
|
|
stream.emit('readable');
|
|
flow(stream);
|
|
}
|
|
|
|
// at this point, the user has presumably seen the 'readable' event,
|
|
// and called read() to consume some data. that may have triggered
|
|
// in turn another _read(n) call, in which case reading = true if
|
|
// it's in progress.
|
|
// However, if we're not ended, or reading, and the length < hwm,
|
|
// then go ahead and try to read some more preemptively.
|
|
function maybeReadMore(stream, state) {
|
|
if (!state.readingMore) {
|
|
state.readingMore = true;
|
|
nextTick(maybeReadMore_, stream, state);
|
|
}
|
|
}
|
|
|
|
function maybeReadMore_(stream, state) {
|
|
var len = state.length;
|
|
while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {
|
|
debug('maybeReadMore read 0');
|
|
stream.read(0);
|
|
if (len === state.length)
|
|
// didn't get any data, stop spinning.
|
|
break;else len = state.length;
|
|
}
|
|
state.readingMore = false;
|
|
}
|
|
|
|
// abstract method. to be overridden in specific implementation classes.
|
|
// call cb(er, data) where data is <= n in length.
|
|
// for virtual (non-string, non-buffer) streams, "length" is somewhat
|
|
// arbitrary, and perhaps not very meaningful.
|
|
Readable.prototype._read = function (n) {
|
|
this.emit('error', new Error('not implemented'));
|
|
};
|
|
|
|
Readable.prototype.pipe = function (dest, pipeOpts) {
|
|
var src = this;
|
|
var state = this._readableState;
|
|
|
|
switch (state.pipesCount) {
|
|
case 0:
|
|
state.pipes = dest;
|
|
break;
|
|
case 1:
|
|
state.pipes = [state.pipes, dest];
|
|
break;
|
|
default:
|
|
state.pipes.push(dest);
|
|
break;
|
|
}
|
|
state.pipesCount += 1;
|
|
debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
|
|
|
|
var doEnd = (!pipeOpts || pipeOpts.end !== false);
|
|
|
|
var endFn = doEnd ? onend : cleanup;
|
|
if (state.endEmitted) nextTick(endFn);else src.once('end', endFn);
|
|
|
|
dest.on('unpipe', onunpipe);
|
|
function onunpipe(readable) {
|
|
debug('onunpipe');
|
|
if (readable === src) {
|
|
cleanup();
|
|
}
|
|
}
|
|
|
|
function onend() {
|
|
debug('onend');
|
|
dest.end();
|
|
}
|
|
|
|
// when the dest drains, it reduces the awaitDrain counter
|
|
// on the source. This would be more elegant with a .once()
|
|
// handler in flow(), but adding and removing repeatedly is
|
|
// too slow.
|
|
var ondrain = pipeOnDrain(src);
|
|
dest.on('drain', ondrain);
|
|
|
|
var cleanedUp = false;
|
|
function cleanup() {
|
|
debug('cleanup');
|
|
// cleanup event handlers once the pipe is broken
|
|
dest.removeListener('close', onclose);
|
|
dest.removeListener('finish', onfinish);
|
|
dest.removeListener('drain', ondrain);
|
|
dest.removeListener('error', onerror);
|
|
dest.removeListener('unpipe', onunpipe);
|
|
src.removeListener('end', onend);
|
|
src.removeListener('end', cleanup);
|
|
src.removeListener('data', ondata);
|
|
|
|
cleanedUp = true;
|
|
|
|
// if the reader is waiting for a drain event from this
|
|
// specific writer, then it would cause it to never start
|
|
// flowing again.
|
|
// So, if this is awaiting a drain, then we just call it now.
|
|
// If we don't know, then assume that we are waiting for one.
|
|
if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
|
|
}
|
|
|
|
// If the user pushes more data while we're writing to dest then we'll end up
|
|
// in ondata again. However, we only want to increase awaitDrain once because
|
|
// dest will only emit one 'drain' event for the multiple writes.
|
|
// => Introduce a guard on increasing awaitDrain.
|
|
var increasedAwaitDrain = false;
|
|
src.on('data', ondata);
|
|
function ondata(chunk) {
|
|
debug('ondata');
|
|
increasedAwaitDrain = false;
|
|
var ret = dest.write(chunk);
|
|
if (false === ret && !increasedAwaitDrain) {
|
|
// If the user unpiped during `dest.write()`, it is possible
|
|
// to get stuck in a permanently paused state if that write
|
|
// also returned false.
|
|
// => Check whether `dest` is still a piping destination.
|
|
if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
|
|
debug('false write response, pause', src._readableState.awaitDrain);
|
|
src._readableState.awaitDrain++;
|
|
increasedAwaitDrain = true;
|
|
}
|
|
src.pause();
|
|
}
|
|
}
|
|
|
|
// if the dest has an error, then stop piping into it.
|
|
// however, don't suppress the throwing behavior for this.
|
|
function onerror(er) {
|
|
debug('onerror', er);
|
|
unpipe();
|
|
dest.removeListener('error', onerror);
|
|
if (listenerCount$1(dest, 'error') === 0) dest.emit('error', er);
|
|
}
|
|
|
|
// Make sure our error handler is attached before userland ones.
|
|
prependListener(dest, 'error', onerror);
|
|
|
|
// Both close and finish should trigger unpipe, but only once.
|
|
function onclose() {
|
|
dest.removeListener('finish', onfinish);
|
|
unpipe();
|
|
}
|
|
dest.once('close', onclose);
|
|
function onfinish() {
|
|
debug('onfinish');
|
|
dest.removeListener('close', onclose);
|
|
unpipe();
|
|
}
|
|
dest.once('finish', onfinish);
|
|
|
|
function unpipe() {
|
|
debug('unpipe');
|
|
src.unpipe(dest);
|
|
}
|
|
|
|
// tell the dest that it's being piped to
|
|
dest.emit('pipe', src);
|
|
|
|
// start the flow if it hasn't been started already.
|
|
if (!state.flowing) {
|
|
debug('pipe resume');
|
|
src.resume();
|
|
}
|
|
|
|
return dest;
|
|
};
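// Editor's note (illustrative sketch, not part of the original library):
// pipe() above already wires up backpressure through awaitDrain, so a consumer
// normally only calls src.pipe(dest). The hypothetical helper below just shows
// the `end: false` option handled by the code above; it is defined here for
// illustration only and never invoked.
function examplePipeWithoutEnd(src, dest) {
  // Keep `dest` open after `src` ends, e.g. when several sources share it.
  src.pipe(dest, { end: false });
  src.on('end', function () {
    // The destination must be ended explicitly once all sources are done.
    dest.end();
  });
  return dest;
}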
|
|
|
|
function pipeOnDrain(src) {
  return function () {
    var state = src._readableState;
    debug('pipeOnDrain', state.awaitDrain);
    if (state.awaitDrain) state.awaitDrain--;
    if (state.awaitDrain === 0 && src.listeners('data').length) {
      state.flowing = true;
      flow(src);
    }
  };
}

Readable.prototype.unpipe = function (dest) {
|
|
var state = this._readableState;
|
|
|
|
// if we're not piping anywhere, then do nothing.
|
|
if (state.pipesCount === 0) return this;
|
|
|
|
// just one destination. most common case.
|
|
if (state.pipesCount === 1) {
|
|
// passed in one, but it's not the right one.
|
|
if (dest && dest !== state.pipes) return this;
|
|
|
|
if (!dest) dest = state.pipes;
|
|
|
|
// got a match.
|
|
state.pipes = null;
|
|
state.pipesCount = 0;
|
|
state.flowing = false;
|
|
if (dest) dest.emit('unpipe', this);
|
|
return this;
|
|
}
|
|
|
|
// slow case. multiple pipe destinations.
|
|
|
|
if (!dest) {
|
|
// remove all.
|
|
var dests = state.pipes;
|
|
var len = state.pipesCount;
|
|
state.pipes = null;
|
|
state.pipesCount = 0;
|
|
state.flowing = false;
|
|
|
|
for (var _i = 0; _i < len; _i++) {
|
|
dests[_i].emit('unpipe', this);
|
|
}return this;
|
|
}
|
|
|
|
// try to find the right one.
|
|
var i = indexOf(state.pipes, dest);
|
|
if (i === -1) return this;
|
|
|
|
state.pipes.splice(i, 1);
|
|
state.pipesCount -= 1;
|
|
if (state.pipesCount === 1) state.pipes = state.pipes[0];
|
|
|
|
dest.emit('unpipe', this);
|
|
|
|
return this;
|
|
};
|
|
|
|
// set up data events if they are asked for
|
|
// Ensure readable listeners eventually get something
|
|
Readable.prototype.on = function (ev, fn) {
|
|
var res = EventEmitter.prototype.on.call(this, ev, fn);
|
|
|
|
if (ev === 'data') {
|
|
// Start flowing on next tick if stream isn't explicitly paused
|
|
if (this._readableState.flowing !== false) this.resume();
|
|
} else if (ev === 'readable') {
|
|
var state = this._readableState;
|
|
if (!state.endEmitted && !state.readableListening) {
|
|
state.readableListening = state.needReadable = true;
|
|
state.emittedReadable = false;
|
|
if (!state.reading) {
|
|
nextTick(nReadingNextTick, this);
|
|
} else if (state.length) {
|
|
emitReadable(this);
|
|
}
|
|
}
|
|
}
|
|
|
|
return res;
|
|
};
|
|
Readable.prototype.addListener = Readable.prototype.on;
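// Editor's note (illustrative sketch, never invoked): as implemented in on()
// above, adding a 'data' listener resumes flowing mode, while a 'readable'
// listener switches the consumer to pull-style reads via read(). The two
// hypothetical helpers below only document that contrast.
function exampleConsumeFlowing(readable, onChunk) {
  // Flowing mode: chunks are pushed to the listener as soon as they arrive.
  readable.on('data', onChunk);
}
function exampleConsumePaused(readable, onChunk) {
  // Paused mode: read() is called explicitly whenever data is buffered.
  readable.on('readable', function () {
    var chunk;
    while ((chunk = readable.read()) !== null) {
      onChunk(chunk);
    }
  });
}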
|
|
|
|
function nReadingNextTick(self) {
|
|
debug('readable nexttick read 0');
|
|
self.read(0);
|
|
}
|
|
|
|
// pause() and resume() are remnants of the legacy readable stream API
|
|
// If the user uses them, then switch into old mode.
|
|
Readable.prototype.resume = function () {
|
|
var state = this._readableState;
|
|
if (!state.flowing) {
|
|
debug('resume');
|
|
state.flowing = true;
|
|
resume(this, state);
|
|
}
|
|
return this;
|
|
};
|
|
|
|
function resume(stream, state) {
|
|
if (!state.resumeScheduled) {
|
|
state.resumeScheduled = true;
|
|
nextTick(resume_, stream, state);
|
|
}
|
|
}
|
|
|
|
function resume_(stream, state) {
|
|
if (!state.reading) {
|
|
debug('resume read 0');
|
|
stream.read(0);
|
|
}
|
|
|
|
state.resumeScheduled = false;
|
|
state.awaitDrain = 0;
|
|
stream.emit('resume');
|
|
flow(stream);
|
|
if (state.flowing && !state.reading) stream.read(0);
|
|
}
|
|
|
|
Readable.prototype.pause = function () {
|
|
debug('call pause flowing=%j', this._readableState.flowing);
|
|
if (false !== this._readableState.flowing) {
|
|
debug('pause');
|
|
this._readableState.flowing = false;
|
|
this.emit('pause');
|
|
}
|
|
return this;
|
|
};
|
|
|
|
function flow(stream) {
|
|
var state = stream._readableState;
|
|
debug('flow', state.flowing);
|
|
while (state.flowing && stream.read() !== null) {}
|
|
}
|
|
|
|
// wrap an old-style stream as the async data source.
|
|
// This is *not* part of the readable stream interface.
|
|
// It is an ugly unfortunate mess of history.
|
|
Readable.prototype.wrap = function (stream) {
|
|
var state = this._readableState;
|
|
var paused = false;
|
|
|
|
var self = this;
|
|
stream.on('end', function () {
|
|
debug('wrapped end');
|
|
if (state.decoder && !state.ended) {
|
|
var chunk = state.decoder.end();
|
|
if (chunk && chunk.length) self.push(chunk);
|
|
}
|
|
|
|
self.push(null);
|
|
});
|
|
|
|
stream.on('data', function (chunk) {
|
|
debug('wrapped data');
|
|
if (state.decoder) chunk = state.decoder.write(chunk);
|
|
|
|
// don't skip over falsy values in objectMode
|
|
if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
|
|
|
|
var ret = self.push(chunk);
|
|
if (!ret) {
|
|
paused = true;
|
|
stream.pause();
|
|
}
|
|
});
|
|
|
|
// proxy all the other methods.
|
|
// important when wrapping filters and duplexes.
|
|
for (var i in stream) {
|
|
if (this[i] === undefined && typeof stream[i] === 'function') {
|
|
this[i] = function (method) {
|
|
return function () {
|
|
return stream[method].apply(stream, arguments);
|
|
};
|
|
}(i);
|
|
}
|
|
}
|
|
|
|
// proxy certain important events.
|
|
var events = ['error', 'close', 'destroy', 'pause', 'resume'];
|
|
forEach(events, function (ev) {
|
|
stream.on(ev, self.emit.bind(self, ev));
|
|
});
|
|
|
|
// when we try to consume some more bytes, simply unpause the
|
|
// underlying stream.
|
|
self._read = function (n) {
|
|
debug('wrapped _read', n);
|
|
if (paused) {
|
|
paused = false;
|
|
stream.resume();
|
|
}
|
|
};
|
|
|
|
return self;
|
|
};
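// Editor's note (illustrative sketch, never invoked): wrap() above adapts an
// old-style stream that only emits 'data'/'end'. A hypothetical adapter could
// look like the helper below, assuming `OldStyleSource` is some legacy
// emitter-based stream defined elsewhere.
function exampleWrapLegacy(OldStyleSource, options) {
  var modern = new Readable(options);
  // wrap() returns `modern` itself; pause/resume and re-emission of 'data'
  // are handled by the implementation above.
  return modern.wrap(new OldStyleSource());
}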
|
|
|
|
// exposed for testing purposes only.
|
|
Readable._fromList = fromList;
|
|
|
|
// Pluck off n bytes from an array of buffers.
|
|
// Length is the combined lengths of all the buffers in the list.
|
|
// This function is designed to be inlinable, so please take care when making
|
|
// changes to the function body.
|
|
function fromList(n, state) {
|
|
// nothing buffered
|
|
if (state.length === 0) return null;
|
|
|
|
var ret;
|
|
if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
|
|
// read it all, truncate the list
|
|
if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length);
|
|
state.buffer.clear();
|
|
} else {
|
|
// read part of list
|
|
ret = fromListPartial(n, state.buffer, state.decoder);
|
|
}
|
|
|
|
return ret;
|
|
}
|
|
|
|
// Extracts only enough buffered data to satisfy the amount requested.
|
|
// This function is designed to be inlinable, so please take care when making
|
|
// changes to the function body.
|
|
function fromListPartial(n, list, hasStrings) {
|
|
var ret;
|
|
if (n < list.head.data.length) {
|
|
// slice is the same for buffers and strings
|
|
ret = list.head.data.slice(0, n);
|
|
list.head.data = list.head.data.slice(n);
|
|
} else if (n === list.head.data.length) {
|
|
// first chunk is a perfect match
|
|
ret = list.shift();
|
|
} else {
|
|
// result spans more than one buffer
|
|
ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list);
|
|
}
|
|
return ret;
|
|
}
|
|
|
|
// Copies a specified amount of characters from the list of buffered data
|
|
// chunks.
|
|
// This function is designed to be inlinable, so please take care when making
|
|
// changes to the function body.
|
|
function copyFromBufferString(n, list) {
|
|
var p = list.head;
|
|
var c = 1;
|
|
var ret = p.data;
|
|
n -= ret.length;
|
|
while (p = p.next) {
|
|
var str = p.data;
|
|
var nb = n > str.length ? str.length : n;
|
|
if (nb === str.length) ret += str;else ret += str.slice(0, n);
|
|
n -= nb;
|
|
if (n === 0) {
|
|
if (nb === str.length) {
|
|
++c;
|
|
if (p.next) list.head = p.next;else list.head = list.tail = null;
|
|
} else {
|
|
list.head = p;
|
|
p.data = str.slice(nb);
|
|
}
|
|
break;
|
|
}
|
|
++c;
|
|
}
|
|
list.length -= c;
|
|
return ret;
|
|
}
|
|
|
|
// Copies a specified amount of bytes from the list of buffered data chunks.
|
|
// This function is designed to be inlinable, so please take care when making
|
|
// changes to the function body.
|
|
function copyFromBuffer(n, list) {
|
|
var ret = Buffer.allocUnsafe(n);
|
|
var p = list.head;
|
|
var c = 1;
|
|
p.data.copy(ret);
|
|
n -= p.data.length;
|
|
while (p = p.next) {
|
|
var buf = p.data;
|
|
var nb = n > buf.length ? buf.length : n;
|
|
buf.copy(ret, ret.length - n, 0, nb);
|
|
n -= nb;
|
|
if (n === 0) {
|
|
if (nb === buf.length) {
|
|
++c;
|
|
if (p.next) list.head = p.next;else list.head = list.tail = null;
|
|
} else {
|
|
list.head = p;
|
|
p.data = buf.slice(nb);
|
|
}
|
|
break;
|
|
}
|
|
++c;
|
|
}
|
|
list.length -= c;
|
|
return ret;
|
|
}
|
|
|
|
function endReadable(stream) {
  var state = stream._readableState;

  // If we get here before consuming all the bytes, then that is a
  // bug in node. Should never happen.
  if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream');

  if (!state.endEmitted) {
    state.ended = true;
    nextTick(endReadableNT, state, stream);
  }
}

function endReadableNT(state, stream) {
  // Check that we didn't get one last unshift.
  if (!state.endEmitted && state.length === 0) {
    state.endEmitted = true;
    stream.readable = false;
    stream.emit('end');
  }
}

function forEach(xs, f) {
  for (var i = 0, l = xs.length; i < l; i++) {
    f(xs[i], i);
  }
}

function indexOf(xs, x) {
  for (var i = 0, l = xs.length; i < l; i++) {
    if (xs[i] === x) return i;
  }
  return -1;
}

// A bit simpler than readable streams.
Writable.WritableState = WritableState;
inherits$1(Writable, EventEmitter);

function nop() {}

function WriteReq(chunk, encoding, cb) {
  this.chunk = chunk;
  this.encoding = encoding;
  this.callback = cb;
  this.next = null;
}

function WritableState(options, stream) {
|
|
Object.defineProperty(this, 'buffer', {
|
|
get: deprecate(function () {
|
|
return this.getBuffer();
|
|
}, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.')
|
|
});
|
|
options = options || {};
|
|
|
|
// object stream flag to indicate whether or not this stream
|
|
// contains buffers or objects.
|
|
this.objectMode = !!options.objectMode;
|
|
|
|
if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
|
|
|
|
// the point at which write() starts returning false
|
|
// Note: 0 is a valid value, means that we always return false if
|
|
// the entire buffer is not flushed immediately on write()
|
|
var hwm = options.highWaterMark;
|
|
var defaultHwm = this.objectMode ? 16 : 16 * 1024;
|
|
this.highWaterMark = hwm || hwm === 0 ? hwm : defaultHwm;
|
|
|
|
// cast to ints.
|
|
this.highWaterMark = ~ ~this.highWaterMark;
|
|
|
|
this.needDrain = false;
|
|
// at the start of calling end()
|
|
this.ending = false;
|
|
// when end() has been called, and returned
|
|
this.ended = false;
|
|
// when 'finish' is emitted
|
|
this.finished = false;
|
|
|
|
// should we decode strings into buffers before passing to _write?
|
|
// this is here so that some node-core streams can optimize string
|
|
// handling at a lower level.
|
|
var noDecode = options.decodeStrings === false;
|
|
this.decodeStrings = !noDecode;
|
|
|
|
// Crypto is kind of old and crusty. Historically, its default string
|
|
// encoding is 'binary' so we have to make this configurable.
|
|
// Everything else in the universe uses 'utf8', though.
|
|
this.defaultEncoding = options.defaultEncoding || 'utf8';
|
|
|
|
// not an actual buffer we keep track of, but a measurement
|
|
// of how much we're waiting to get pushed to some underlying
|
|
// socket or file.
|
|
this.length = 0;
|
|
|
|
// a flag to see when we're in the middle of a write.
|
|
this.writing = false;
|
|
|
|
// when true all writes will be buffered until .uncork() call
|
|
this.corked = 0;
|
|
|
|
// a flag to be able to tell if the onwrite cb is called immediately,
|
|
// or on a later tick. We set this to true at first, because any
|
|
// actions that shouldn't happen until "later" should generally also
|
|
// not happen before the first write call.
|
|
this.sync = true;
|
|
|
|
// a flag to know if we're processing previously buffered items, which
|
|
// may call the _write() callback in the same tick, so that we don't
|
|
// end up in an overlapped onwrite situation.
|
|
this.bufferProcessing = false;
|
|
|
|
// the callback that's passed to _write(chunk,cb)
|
|
this.onwrite = function (er) {
|
|
onwrite(stream, er);
|
|
};
|
|
|
|
// the callback that the user supplies to write(chunk,encoding,cb)
|
|
this.writecb = null;
|
|
|
|
// the amount that is being written when _write is called.
|
|
this.writelen = 0;
|
|
|
|
this.bufferedRequest = null;
|
|
this.lastBufferedRequest = null;
|
|
|
|
// number of pending user-supplied write callbacks
|
|
// this must be 0 before 'finish' can be emitted
|
|
this.pendingcb = 0;
|
|
|
|
// emit prefinish if the only thing we're waiting for is _write cbs
|
|
// This is relevant for synchronous Transform streams
|
|
this.prefinished = false;
|
|
|
|
// True if the error was already emitted and should not be thrown again
|
|
this.errorEmitted = false;
|
|
|
|
// count buffered requests
|
|
this.bufferedRequestCount = 0;
|
|
|
|
// allocate the first CorkedRequest, there is always
|
|
// one allocated and free to use, and we maintain at most two
|
|
this.corkedRequestsFree = new CorkedRequest(this);
|
|
}
|
|
|
|
WritableState.prototype.getBuffer = function writableStateGetBuffer() {
|
|
var current = this.bufferedRequest;
|
|
var out = [];
|
|
while (current) {
|
|
out.push(current);
|
|
current = current.next;
|
|
}
|
|
return out;
|
|
};
|
|
function Writable(options) {
|
|
|
|
// Writable ctor is applied to Duplexes, though they're not
|
|
// instanceof Writable, they're instanceof Readable.
|
|
if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options);
|
|
|
|
this._writableState = new WritableState(options, this);
|
|
|
|
// legacy.
|
|
this.writable = true;
|
|
|
|
if (options) {
|
|
if (typeof options.write === 'function') this._write = options.write;
|
|
|
|
if (typeof options.writev === 'function') this._writev = options.writev;
|
|
}
|
|
|
|
EventEmitter.call(this);
|
|
}
|
|
|
|
// Otherwise people can pipe Writable streams, which is just wrong.
|
|
Writable.prototype.pipe = function () {
|
|
this.emit('error', new Error('Cannot pipe, not readable'));
|
|
};
|
|
|
|
function writeAfterEnd(stream, cb) {
|
|
var er = new Error('write after end');
|
|
// TODO: defer error events consistently everywhere, not just the cb
|
|
stream.emit('error', er);
|
|
nextTick(cb, er);
|
|
}
|
|
|
|
// If we get something that is not a buffer, string, null, or undefined,
|
|
// and we're not in objectMode, then that's an error.
|
|
// Otherwise stream chunks are all considered to be of length=1, and the
|
|
// watermarks determine how many objects to keep in the buffer, rather than
|
|
// how many bytes or characters.
|
|
function validChunk(stream, state, chunk, cb) {
|
|
var valid = true;
|
|
var er = false;
|
|
// Always throw error if a null is written
|
|
// if we are not in object mode then throw
|
|
// if it is not a buffer, string, or undefined.
|
|
if (chunk === null) {
|
|
er = new TypeError('May not write null values to stream');
|
|
} else if (!Buffer.isBuffer(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
|
|
er = new TypeError('Invalid non-string/buffer chunk');
|
|
}
|
|
if (er) {
|
|
stream.emit('error', er);
|
|
nextTick(cb, er);
|
|
valid = false;
|
|
}
|
|
return valid;
|
|
}
|
|
|
|
Writable.prototype.write = function (chunk, encoding, cb) {
|
|
var state = this._writableState;
|
|
var ret = false;
|
|
|
|
if (typeof encoding === 'function') {
|
|
cb = encoding;
|
|
encoding = null;
|
|
}
|
|
|
|
if (Buffer.isBuffer(chunk)) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
|
|
|
|
if (typeof cb !== 'function') cb = nop;
|
|
|
|
if (state.ended) writeAfterEnd(this, cb);else if (validChunk(this, state, chunk, cb)) {
|
|
state.pendingcb++;
|
|
ret = writeOrBuffer(this, state, chunk, encoding, cb);
|
|
}
|
|
|
|
return ret;
|
|
};
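// Editor's note (illustrative sketch, never invoked): write() above returns
// false once the buffered length reaches highWaterMark, which is the cue to
// wait for 'drain'. The hypothetical helper below shows that contract from
// the producer side.
function exampleWriteWithBackpressure(writable, chunks, done) {
  var i = 0;
  (function next() {
    while (i < chunks.length) {
      var ok = writable.write(chunks[i++]);
      if (!ok) {
        // Buffer is full: resume only after the stream emits 'drain'.
        writable.once('drain', next);
        return;
      }
    }
    done();
  })();
}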
|
|
|
|
Writable.prototype.cork = function () {
|
|
var state = this._writableState;
|
|
|
|
state.corked++;
|
|
};
|
|
|
|
Writable.prototype.uncork = function () {
|
|
var state = this._writableState;
|
|
|
|
if (state.corked) {
|
|
state.corked--;
|
|
|
|
if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
|
|
}
|
|
};
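// Editor's note (illustrative sketch, never invoked): cork()/uncork() above
// let several small writes be batched into a single _writev() call when the
// destination implements one. The hypothetical helper below shows the usual
// pairing.
function exampleCorkedWrites(writable, parts) {
  writable.cork();
  for (var i = 0; i < parts.length; i++) {
    writable.write(parts[i]);
  }
  // uncork() flushes the buffered requests; deferring it with nextTick is the
  // common idiom so writes issued later in the same tick are still batched.
  nextTick(function () {
    writable.uncork();
  });
}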
|
|
|
|
Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
|
|
// node::ParseEncoding() requires lower case.
|
|
if (typeof encoding === 'string') encoding = encoding.toLowerCase();
|
|
if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding);
|
|
this._writableState.defaultEncoding = encoding;
|
|
return this;
|
|
};
|
|
|
|
function decodeChunk(state, chunk, encoding) {
|
|
if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
|
|
chunk = Buffer.from(chunk, encoding);
|
|
}
|
|
return chunk;
|
|
}
|
|
|
|
// if we're already writing something, then just put this
|
|
// in the queue, and wait our turn. Otherwise, call _write
|
|
// If we return false, then we need a drain event, so set that flag.
|
|
function writeOrBuffer(stream, state, chunk, encoding, cb) {
|
|
chunk = decodeChunk(state, chunk, encoding);
|
|
|
|
if (Buffer.isBuffer(chunk)) encoding = 'buffer';
|
|
var len = state.objectMode ? 1 : chunk.length;
|
|
|
|
state.length += len;
|
|
|
|
var ret = state.length < state.highWaterMark;
|
|
// we must ensure that previous needDrain will not be reset to false.
|
|
if (!ret) state.needDrain = true;
|
|
|
|
if (state.writing || state.corked) {
|
|
var last = state.lastBufferedRequest;
|
|
state.lastBufferedRequest = new WriteReq(chunk, encoding, cb);
|
|
if (last) {
|
|
last.next = state.lastBufferedRequest;
|
|
} else {
|
|
state.bufferedRequest = state.lastBufferedRequest;
|
|
}
|
|
state.bufferedRequestCount += 1;
|
|
} else {
|
|
doWrite(stream, state, false, len, chunk, encoding, cb);
|
|
}
|
|
|
|
return ret;
|
|
}
|
|
|
|
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
|
|
state.writelen = len;
|
|
state.writecb = cb;
|
|
state.writing = true;
|
|
state.sync = true;
|
|
if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
|
|
state.sync = false;
|
|
}
|
|
|
|
function onwriteError(stream, state, sync, er, cb) {
|
|
--state.pendingcb;
|
|
if (sync) nextTick(cb, er);else cb(er);
|
|
|
|
stream._writableState.errorEmitted = true;
|
|
stream.emit('error', er);
|
|
}
|
|
|
|
function onwriteStateUpdate(state) {
|
|
state.writing = false;
|
|
state.writecb = null;
|
|
state.length -= state.writelen;
|
|
state.writelen = 0;
|
|
}
|
|
|
|
function onwrite(stream, er) {
|
|
var state = stream._writableState;
|
|
var sync = state.sync;
|
|
var cb = state.writecb;
|
|
|
|
onwriteStateUpdate(state);
|
|
|
|
if (er) onwriteError(stream, state, sync, er, cb);else {
|
|
// Check if we're actually ready to finish, but don't emit yet
|
|
var finished = needFinish(state);
|
|
|
|
if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
|
|
clearBuffer(stream, state);
|
|
}
|
|
|
|
if (sync) {
|
|
/*<replacement>*/
|
|
nextTick(afterWrite, stream, state, finished, cb);
|
|
/*</replacement>*/
|
|
} else {
|
|
afterWrite(stream, state, finished, cb);
|
|
}
|
|
}
|
|
}
|
|
|
|
function afterWrite(stream, state, finished, cb) {
|
|
if (!finished) onwriteDrain(stream, state);
|
|
state.pendingcb--;
|
|
cb();
|
|
finishMaybe(stream, state);
|
|
}
|
|
|
|
// Must force callback to be called on nextTick, so that we don't
|
|
// emit 'drain' before the write() consumer gets the 'false' return
|
|
// value, and has a chance to attach a 'drain' listener.
|
|
function onwriteDrain(stream, state) {
|
|
if (state.length === 0 && state.needDrain) {
|
|
state.needDrain = false;
|
|
stream.emit('drain');
|
|
}
|
|
}
|
|
|
|
// if there's something in the buffer waiting, then process it
|
|
function clearBuffer(stream, state) {
|
|
state.bufferProcessing = true;
|
|
var entry = state.bufferedRequest;
|
|
|
|
if (stream._writev && entry && entry.next) {
|
|
// Fast case, write everything using _writev()
|
|
var l = state.bufferedRequestCount;
|
|
var buffer = new Array(l);
|
|
var holder = state.corkedRequestsFree;
|
|
holder.entry = entry;
|
|
|
|
var count = 0;
|
|
while (entry) {
|
|
buffer[count] = entry;
|
|
entry = entry.next;
|
|
count += 1;
|
|
}
|
|
|
|
doWrite(stream, state, true, state.length, buffer, '', holder.finish);
|
|
|
|
// doWrite is almost always async, defer these to save a bit of time
|
|
// as the hot path ends with doWrite
|
|
state.pendingcb++;
|
|
state.lastBufferedRequest = null;
|
|
if (holder.next) {
|
|
state.corkedRequestsFree = holder.next;
|
|
holder.next = null;
|
|
} else {
|
|
state.corkedRequestsFree = new CorkedRequest(state);
|
|
}
|
|
} else {
|
|
// Slow case, write chunks one-by-one
|
|
while (entry) {
|
|
var chunk = entry.chunk;
|
|
var encoding = entry.encoding;
|
|
var cb = entry.callback;
|
|
var len = state.objectMode ? 1 : chunk.length;
|
|
|
|
doWrite(stream, state, false, len, chunk, encoding, cb);
|
|
entry = entry.next;
|
|
// if we didn't call the onwrite immediately, then
|
|
// it means that we need to wait until it does.
|
|
// also, that means that the chunk and cb are currently
|
|
// being processed, so move the buffer counter past them.
|
|
if (state.writing) {
|
|
break;
|
|
}
|
|
}
|
|
|
|
if (entry === null) state.lastBufferedRequest = null;
|
|
}
|
|
|
|
state.bufferedRequestCount = 0;
|
|
state.bufferedRequest = entry;
|
|
state.bufferProcessing = false;
|
|
}
|
|
|
|
Writable.prototype._write = function (chunk, encoding, cb) {
|
|
cb(new Error('not implemented'));
|
|
};
|
|
|
|
Writable.prototype._writev = null;
|
|
|
|
Writable.prototype.end = function (chunk, encoding, cb) {
|
|
var state = this._writableState;
|
|
|
|
if (typeof chunk === 'function') {
|
|
cb = chunk;
|
|
chunk = null;
|
|
encoding = null;
|
|
} else if (typeof encoding === 'function') {
|
|
cb = encoding;
|
|
encoding = null;
|
|
}
|
|
|
|
if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);
|
|
|
|
// .end() fully uncorks
|
|
if (state.corked) {
|
|
state.corked = 1;
|
|
this.uncork();
|
|
}
|
|
|
|
// ignore unnecessary end() calls.
|
|
if (!state.ending && !state.finished) endWritable(this, state, cb);
|
|
};
|
|
|
|
function needFinish(state) {
|
|
return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
|
|
}
|
|
|
|
function prefinish(stream, state) {
|
|
if (!state.prefinished) {
|
|
state.prefinished = true;
|
|
stream.emit('prefinish');
|
|
}
|
|
}
|
|
|
|
function finishMaybe(stream, state) {
|
|
var need = needFinish(state);
|
|
if (need) {
|
|
if (state.pendingcb === 0) {
|
|
prefinish(stream, state);
|
|
state.finished = true;
|
|
stream.emit('finish');
|
|
} else {
|
|
prefinish(stream, state);
|
|
}
|
|
}
|
|
return need;
|
|
}
|
|
|
|
function endWritable(stream, state, cb) {
|
|
state.ending = true;
|
|
finishMaybe(stream, state);
|
|
if (cb) {
|
|
if (state.finished) nextTick(cb);else stream.once('finish', cb);
|
|
}
|
|
state.ended = true;
|
|
stream.writable = false;
|
|
}
|
|
|
|
// This looks like a linked list, but it is not:
// there will only ever be two of these for each stream.
function CorkedRequest(state) {
  var _this = this;

  this.next = null;
  this.entry = null;

  this.finish = function (err) {
    var entry = _this.entry;
    _this.entry = null;
    while (entry) {
      var cb = entry.callback;
      state.pendingcb--;
      cb(err);
      entry = entry.next;
    }
    if (state.corkedRequestsFree) {
      state.corkedRequestsFree.next = _this;
    } else {
      state.corkedRequestsFree = _this;
    }
  };
}

inherits$1(Duplex, Readable);

var keys = Object.keys(Writable.prototype);
for (var v = 0; v < keys.length; v++) {
  var method = keys[v];
  if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
}

function Duplex(options) {
  if (!(this instanceof Duplex)) return new Duplex(options);

  Readable.call(this, options);
  Writable.call(this, options);

  if (options && options.readable === false) this.readable = false;

  if (options && options.writable === false) this.writable = false;

  this.allowHalfOpen = true;
  if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;

  this.once('end', onend);
}

// the no-half-open enforcer
function onend() {
  // if we allow half-open state, or if the writable side ended,
  // then we're ok.
  if (this.allowHalfOpen || this._writableState.ended) return;

  // no more data can be written.
  // But allow more writes to happen in this tick.
  nextTick(onEndNT, this);
}

function onEndNT(self) {
  self.end();
}

// a transform stream is a readable/writable stream where you do
// something with the data as it passes through.
inherits$1(Transform, Duplex);

function TransformState(stream) {
|
|
this.afterTransform = function (er, data) {
|
|
return afterTransform(stream, er, data);
|
|
};
|
|
|
|
this.needTransform = false;
|
|
this.transforming = false;
|
|
this.writecb = null;
|
|
this.writechunk = null;
|
|
this.writeencoding = null;
|
|
}
|
|
|
|
function afterTransform(stream, er, data) {
|
|
var ts = stream._transformState;
|
|
ts.transforming = false;
|
|
|
|
var cb = ts.writecb;
|
|
|
|
if (!cb) return stream.emit('error', new Error('no writecb in Transform class'));
|
|
|
|
ts.writechunk = null;
|
|
ts.writecb = null;
|
|
|
|
if (data !== null && data !== undefined) stream.push(data);
|
|
|
|
cb(er);
|
|
|
|
var rs = stream._readableState;
|
|
rs.reading = false;
|
|
if (rs.needReadable || rs.length < rs.highWaterMark) {
|
|
stream._read(rs.highWaterMark);
|
|
}
|
|
}
|
|
function Transform(options) {
|
|
if (!(this instanceof Transform)) return new Transform(options);
|
|
|
|
Duplex.call(this, options);
|
|
|
|
this._transformState = new TransformState(this);
|
|
|
|
// when the writable side finishes, then flush out anything remaining.
|
|
var stream = this;
|
|
|
|
// start out asking for a readable event once data is transformed.
|
|
this._readableState.needReadable = true;
|
|
|
|
// we have implemented the _read method, and done the other things
|
|
// that Readable wants before the first _read call, so unset the
|
|
// sync guard flag.
|
|
this._readableState.sync = false;
|
|
|
|
if (options) {
|
|
if (typeof options.transform === 'function') this._transform = options.transform;
|
|
|
|
if (typeof options.flush === 'function') this._flush = options.flush;
|
|
}
|
|
|
|
this.once('prefinish', function () {
|
|
if (typeof this._flush === 'function') this._flush(function (er) {
|
|
done(stream, er);
|
|
});else done(stream);
|
|
});
|
|
}
|
|
|
|
Transform.prototype.push = function (chunk, encoding) {
|
|
this._transformState.needTransform = false;
|
|
return Duplex.prototype.push.call(this, chunk, encoding);
|
|
};
|
|
|
|
// This is the part where you do stuff!
|
|
// override this function in implementation classes.
|
|
// 'chunk' is an input chunk.
|
|
//
|
|
// Call `push(newChunk)` to pass along transformed output
|
|
// to the readable side. You may call 'push' zero or more times.
|
|
//
|
|
// Call `cb(err)` when you are done with this chunk. If you pass
|
|
// an error, then that'll put the hurt on the whole operation. If you
|
|
// never call cb(), then you'll never get another chunk.
|
|
Transform.prototype._transform = function (chunk, encoding, cb) {
|
|
throw new Error('Not implemented');
|
|
};
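// Editor's note (illustrative sketch, never invoked): the comment above
// describes the _transform contract. A minimal hypothetical implementation,
// using the constructor options supported by Transform() above, could be:
function exampleCreateUpperCaseTransform() {
  return new Transform({
    transform: function (chunk, encoding, cb) {
      // Push the transformed data and signal that this chunk is done.
      cb(null, String(chunk).toUpperCase());
    }
  });
}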
|
|
|
|
Transform.prototype._write = function (chunk, encoding, cb) {
|
|
var ts = this._transformState;
|
|
ts.writecb = cb;
|
|
ts.writechunk = chunk;
|
|
ts.writeencoding = encoding;
|
|
if (!ts.transforming) {
|
|
var rs = this._readableState;
|
|
if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
|
|
}
|
|
};
|
|
|
|
// Doesn't matter what the args are here.
|
|
// _transform does all the work.
|
|
// That we got here means that the readable side wants more data.
|
|
Transform.prototype._read = function (n) {
|
|
var ts = this._transformState;
|
|
|
|
if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
|
|
ts.transforming = true;
|
|
this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
|
|
} else {
|
|
// mark that we need a transform, so that any data that comes in
|
|
// will get processed, now that we've asked for it.
|
|
ts.needTransform = true;
|
|
}
|
|
};
|
|
|
|
function done(stream, er) {
|
|
if (er) return stream.emit('error', er);
|
|
|
|
// if there's nothing in the write buffer, then that means
|
|
// that nothing more will ever be provided
|
|
var ws = stream._writableState;
|
|
var ts = stream._transformState;
|
|
|
|
if (ws.length) throw new Error('Calling transform done when ws.length != 0');
|
|
|
|
if (ts.transforming) throw new Error('Calling transform done when still transforming');
|
|
|
|
return stream.push(null);
|
|
}
|
|
|
|
inherits$1(PassThrough, Transform);
function PassThrough(options) {
  if (!(this instanceof PassThrough)) return new PassThrough(options);

  Transform.call(this, options);
}

PassThrough.prototype._transform = function (chunk, encoding, cb) {
  cb(null, chunk);
};

inherits$1(Stream, EventEmitter);
Stream.Readable = Readable;
Stream.Writable = Writable;
Stream.Duplex = Duplex;
Stream.Transform = Transform;
Stream.PassThrough = PassThrough;

// Backwards-compat with node 0.4.x
Stream.Stream = Stream;

// old-style streams. Note that the pipe method (the only relevant
// part of this class) is overridden in the Readable class.

function Stream() {
  EventEmitter.call(this);
}

Stream.prototype.pipe = function(dest, options) {
|
|
var source = this;
|
|
|
|
function ondata(chunk) {
|
|
if (dest.writable) {
|
|
if (false === dest.write(chunk) && source.pause) {
|
|
source.pause();
|
|
}
|
|
}
|
|
}
|
|
|
|
source.on('data', ondata);
|
|
|
|
function ondrain() {
|
|
if (source.readable && source.resume) {
|
|
source.resume();
|
|
}
|
|
}
|
|
|
|
dest.on('drain', ondrain);
|
|
|
|
// If the 'end' option is not supplied, dest.end() will be called when
|
|
// source gets the 'end' or 'close' events. Only dest.end() once.
|
|
if (!dest._isStdio && (!options || options.end !== false)) {
|
|
source.on('end', onend);
|
|
source.on('close', onclose);
|
|
}
|
|
|
|
var didOnEnd = false;
|
|
function onend() {
|
|
if (didOnEnd) return;
|
|
didOnEnd = true;
|
|
|
|
dest.end();
|
|
}
|
|
|
|
|
|
function onclose() {
|
|
if (didOnEnd) return;
|
|
didOnEnd = true;
|
|
|
|
if (typeof dest.destroy === 'function') dest.destroy();
|
|
}
|
|
|
|
// don't leave dangling pipes when there are errors.
|
|
function onerror(er) {
|
|
cleanup();
|
|
if (EventEmitter.listenerCount(this, 'error') === 0) {
|
|
throw er; // Unhandled stream error in pipe.
|
|
}
|
|
}
|
|
|
|
source.on('error', onerror);
|
|
dest.on('error', onerror);
|
|
|
|
// remove all the event listeners that were added.
|
|
function cleanup() {
|
|
source.removeListener('data', ondata);
|
|
dest.removeListener('drain', ondrain);
|
|
|
|
source.removeListener('end', onend);
|
|
source.removeListener('close', onclose);
|
|
|
|
source.removeListener('error', onerror);
|
|
dest.removeListener('error', onerror);
|
|
|
|
source.removeListener('end', cleanup);
|
|
source.removeListener('close', cleanup);
|
|
|
|
dest.removeListener('close', cleanup);
|
|
}
|
|
|
|
source.on('end', cleanup);
|
|
source.on('close', cleanup);
|
|
|
|
dest.on('close', cleanup);
|
|
|
|
dest.emit('pipe', source);
|
|
|
|
// Allow for unix-like usage: A.pipe(B).pipe(C)
|
|
return dest;
|
|
};
|
|
|
|
function _typeof(obj) {
|
|
if (typeof Symbol === "function" && typeof Symbol.iterator === "symbol") {
|
|
_typeof = function (obj) {
|
|
return typeof obj;
|
|
};
|
|
} else {
|
|
_typeof = function (obj) {
|
|
return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj;
|
|
};
|
|
}
|
|
|
|
return _typeof(obj);
|
|
}
|
|
|
|
function _classCallCheck(instance, Constructor) {
|
|
if (!(instance instanceof Constructor)) {
|
|
throw new TypeError("Cannot call a class as a function");
|
|
}
|
|
}
|
|
|
|
function _defineProperties(target, props) {
|
|
for (var i = 0; i < props.length; i++) {
|
|
var descriptor = props[i];
|
|
descriptor.enumerable = descriptor.enumerable || false;
|
|
descriptor.configurable = true;
|
|
if ("value" in descriptor) descriptor.writable = true;
|
|
Object.defineProperty(target, descriptor.key, descriptor);
|
|
}
|
|
}
|
|
|
|
function _createClass(Constructor, protoProps, staticProps) {
|
|
if (protoProps) _defineProperties(Constructor.prototype, protoProps);
|
|
if (staticProps) _defineProperties(Constructor, staticProps);
|
|
return Constructor;
|
|
}
|
|
|
|
function _defineProperty(obj, key, value) {
|
|
if (key in obj) {
|
|
Object.defineProperty(obj, key, {
|
|
value: value,
|
|
enumerable: true,
|
|
configurable: true,
|
|
writable: true
|
|
});
|
|
} else {
|
|
obj[key] = value;
|
|
}
|
|
|
|
return obj;
|
|
}
|
|
|
|
function _inherits(subClass, superClass) {
|
|
if (typeof superClass !== "function" && superClass !== null) {
|
|
throw new TypeError("Super expression must either be null or a function");
|
|
}
|
|
|
|
subClass.prototype = Object.create(superClass && superClass.prototype, {
|
|
constructor: {
|
|
value: subClass,
|
|
writable: true,
|
|
configurable: true
|
|
}
|
|
});
|
|
if (superClass) _setPrototypeOf(subClass, superClass);
|
|
}
|
|
|
|
function _getPrototypeOf(o) {
|
|
_getPrototypeOf = Object.setPrototypeOf ? Object.getPrototypeOf : function _getPrototypeOf(o) {
|
|
return o.__proto__ || Object.getPrototypeOf(o);
|
|
};
|
|
return _getPrototypeOf(o);
|
|
}
|
|
|
|
function _setPrototypeOf(o, p) {
|
|
_setPrototypeOf = Object.setPrototypeOf || function _setPrototypeOf(o, p) {
|
|
o.__proto__ = p;
|
|
return o;
|
|
};
|
|
|
|
return _setPrototypeOf(o, p);
|
|
}
|
|
|
|
function _assertThisInitialized(self) {
|
|
if (self === void 0) {
|
|
throw new ReferenceError("this hasn't been initialised - super() hasn't been called");
|
|
}
|
|
|
|
return self;
|
|
}
|
|
|
|
function _possibleConstructorReturn(self, call) {
|
|
if (call && (typeof call === "object" || typeof call === "function")) {
|
|
return call;
|
|
}
|
|
|
|
return _assertThisInitialized(self);
|
|
}
|
|
|
|
function _toArray(arr) {
|
|
return _arrayWithHoles(arr) || _iterableToArray(arr) || _nonIterableRest();
|
|
}
|
|
|
|
function _toConsumableArray(arr) {
|
|
return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _nonIterableSpread();
|
|
}
|
|
|
|
function _arrayWithoutHoles(arr) {
|
|
if (Array.isArray(arr)) {
|
|
for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) arr2[i] = arr[i];
|
|
|
|
return arr2;
|
|
}
|
|
}
|
|
|
|
function _arrayWithHoles(arr) {
|
|
if (Array.isArray(arr)) return arr;
|
|
}
|
|
|
|
function _iterableToArray(iter) {
|
|
if (Symbol.iterator in Object(iter) || Object.prototype.toString.call(iter) === "[object Arguments]") return Array.from(iter);
|
|
}
|
|
|
|
function _nonIterableSpread() {
|
|
throw new TypeError("Invalid attempt to spread non-iterable instance");
|
|
}
|
|
|
|
function _nonIterableRest() {
|
|
throw new TypeError("Invalid attempt to destructure non-iterable instance");
|
|
}
|
|
|
|
/*
|
|
The MIT License (MIT)
|
|
|
|
Copyright (c) 2016 CoderPuppy
|
|
|
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
of this software and associated documentation files (the "Software"), to deal
|
|
in the Software without restriction, including without limitation the rights
|
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
copies of the Software, and to permit persons to whom the Software is
|
|
furnished to do so, subject to the following conditions:
|
|
|
|
The above copyright notice and this permission notice shall be included in all
|
|
copies or substantial portions of the Software.
|
|
|
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
SOFTWARE.
|
|
|
|
*/
|
|
var _endianness;
function endianness() {
  if (typeof _endianness === 'undefined') {
    var a = new ArrayBuffer(2);
    var b = new Uint8Array(a);
    var c = new Uint16Array(a);
    b[0] = 1;
    b[1] = 2;
    if (c[0] === 258) {
      _endianness = 'BE';
    } else if (c[0] === 513) {
      _endianness = 'LE';
    } else {
      throw new Error('unable to figure out endianness');
    }
  }
  return _endianness;
}

function hostname() {
  if (typeof global$1.location !== 'undefined') {
    return global$1.location.hostname;
  } else return '';
}

function loadavg() {
  return [];
}

function uptime() {
  return 0;
}

function freemem() {
  return Number.MAX_VALUE;
}

function totalmem() {
  return Number.MAX_VALUE;
}

function cpus() {
  return [];
}

function type() {
  return 'Browser';
}

function release() {
  if (typeof global$1.navigator !== 'undefined') {
    return global$1.navigator.appVersion;
  }
  return '';
}

function networkInterfaces() {}
function getNetworkInterfaces() {}

function tmpDir() {
  return '/tmp';
}
var tmpdir = tmpDir;

var EOL = '\n';
var os = {
  EOL: EOL,
  tmpdir: tmpdir,
  tmpDir: tmpDir,
  networkInterfaces: networkInterfaces,
  getNetworkInterfaces: getNetworkInterfaces,
  release: release,
  type: type,
  cpus: cpus,
  totalmem: totalmem,
  freemem: freemem,
  uptime: uptime,
  loadavg: loadavg,
  hostname: hostname,
  endianness: endianness,
};

var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {};
|
|
|
|
/**
|
|
* lodash (Custom Build) <https://lodash.com/>
|
|
* Build: `lodash modularize exports="npm" -o ./`
|
|
* Copyright jQuery Foundation and other contributors <https://jquery.org/>
|
|
* Released under MIT license <https://lodash.com/license>
|
|
* Based on Underscore.js 1.8.3 <http://underscorejs.org/LICENSE>
|
|
* Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors
|
|
*/
|
|
|
|
/** Used as the `TypeError` message for "Functions" methods. */
|
|
var FUNC_ERROR_TEXT = 'Expected a function';
|
|
|
|
/** Used to stand-in for `undefined` hash values. */
|
|
var HASH_UNDEFINED = '__lodash_hash_undefined__';
|
|
|
|
/** Used as references for various `Number` constants. */
|
|
var INFINITY = 1 / 0;
|
|
|
|
/** `Object#toString` result references. */
|
|
var funcTag = '[object Function]',
|
|
genTag = '[object GeneratorFunction]',
|
|
symbolTag = '[object Symbol]';
|
|
|
|
/** Used to match property names within property paths. */
|
|
var reIsDeepProp = /\.|\[(?:[^[\]]*|(["'])(?:(?!\1)[^\\]|\\.)*?\1)\]/,
|
|
reIsPlainProp = /^\w*$/,
|
|
reLeadingDot = /^\./,
|
|
rePropName = /[^.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|$))/g;
|
|
|
|
/**
|
|
* Used to match `RegExp`
|
|
* [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns).
|
|
*/
|
|
var reRegExpChar = /[\\^$.*+?()[\]{}|]/g;
|
|
|
|
/** Used to match backslashes in property paths. */
|
|
var reEscapeChar = /\\(\\)?/g;
|
|
|
|
/** Used to detect host constructors (Safari). */
|
|
var reIsHostCtor = /^\[object .+?Constructor\]$/;
|
|
|
|
/** Detect free variable `global` from Node.js. */
|
|
var freeGlobal = typeof commonjsGlobal == 'object' && commonjsGlobal && commonjsGlobal.Object === Object && commonjsGlobal;
|
|
|
|
/** Detect free variable `self`. */
|
|
var freeSelf = typeof self == 'object' && self && self.Object === Object && self;
|
|
|
|
/** Used as a reference to the global object. */
|
|
var root = freeGlobal || freeSelf || Function('return this')();
|
|
|
|
/**
|
|
* Gets the value at `key` of `object`.
|
|
*
|
|
* @private
|
|
* @param {Object} [object] The object to query.
|
|
* @param {string} key The key of the property to get.
|
|
* @returns {*} Returns the property value.
|
|
*/
|
|
function getValue(object, key) {
|
|
return object == null ? undefined : object[key];
|
|
}
|
|
|
|
/**
|
|
* Checks if `value` is a host object in IE < 9.
|
|
*
|
|
* @private
|
|
* @param {*} value The value to check.
|
|
* @returns {boolean} Returns `true` if `value` is a host object, else `false`.
|
|
*/
|
|
function isHostObject(value) {
|
|
// Many host objects are `Object` objects that can coerce to strings
|
|
// despite having improperly defined `toString` methods.
|
|
var result = false;
|
|
if (value != null && typeof value.toString != 'function') {
|
|
try {
|
|
result = !!(value + '');
|
|
} catch (e) {}
|
|
}
|
|
return result;
|
|
}
|
|
|
|
/** Used for built-in method references. */
|
|
var arrayProto = Array.prototype,
|
|
funcProto = Function.prototype,
|
|
objectProto = Object.prototype;
|
|
|
|
/** Used to detect overreaching core-js shims. */
|
|
var coreJsData = root['__core-js_shared__'];
|
|
|
|
/** Used to detect methods masquerading as native. */
|
|
var maskSrcKey = (function() {
|
|
var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || '');
|
|
return uid ? ('Symbol(src)_1.' + uid) : '';
|
|
}());
|
|
|
|
/** Used to resolve the decompiled source of functions. */
|
|
var funcToString = funcProto.toString;
|
|
|
|
/** Used to check objects for own properties. */
|
|
var hasOwnProperty$1 = objectProto.hasOwnProperty;
|
|
|
|
/**
|
|
* Used to resolve the
|
|
* [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring)
|
|
* of values.
|
|
*/
|
|
var objectToString$1 = objectProto.toString;
|
|
|
|
/** Used to detect if a method is native. */
|
|
var reIsNative = RegExp('^' +
|
|
funcToString.call(hasOwnProperty$1).replace(reRegExpChar, '\\$&')
|
|
.replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$'
|
|
);
|
|
|
|
/** Built-in value references. */
|
|
var Symbol$1 = root.Symbol,
|
|
splice = arrayProto.splice;
|
|
|
|
/* Built-in method references that are verified to be native. */
|
|
var Map = getNative(root, 'Map'),
|
|
nativeCreate = getNative(Object, 'create');
|
|
|
|
/** Used to convert symbols to primitives and strings. */
|
|
var symbolProto = Symbol$1 ? Symbol$1.prototype : undefined,
|
|
symbolToString = symbolProto ? symbolProto.toString : undefined;
|
|
|
|
/**
|
|
* Creates a hash object.
|
|
*
|
|
* @private
|
|
* @constructor
|
|
* @param {Array} [entries] The key-value pairs to cache.
|
|
*/
|
|
function Hash(entries) {
|
|
var index = -1,
|
|
length = entries ? entries.length : 0;
|
|
|
|
this.clear();
|
|
while (++index < length) {
|
|
var entry = entries[index];
|
|
this.set(entry[0], entry[1]);
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Removes all key-value entries from the hash.
|
|
*
|
|
* @private
|
|
* @name clear
|
|
* @memberOf Hash
|
|
*/
|
|
function hashClear() {
|
|
this.__data__ = nativeCreate ? nativeCreate(null) : {};
|
|
}
|
|
|
|
/**
|
|
* Removes `key` and its value from the hash.
|
|
*
|
|
* @private
|
|
* @name delete
|
|
* @memberOf Hash
|
|
* @param {Object} hash The hash to modify.
|
|
* @param {string} key The key of the value to remove.
|
|
* @returns {boolean} Returns `true` if the entry was removed, else `false`.
|
|
*/
|
|
function hashDelete(key) {
|
|
return this.has(key) && delete this.__data__[key];
|
|
}
|
|
|
|
/**
|
|
* Gets the hash value for `key`.
|
|
*
|
|
* @private
|
|
* @name get
|
|
* @memberOf Hash
|
|
* @param {string} key The key of the value to get.
|
|
* @returns {*} Returns the entry value.
|
|
*/
|
|
function hashGet(key) {
|
|
var data = this.__data__;
|
|
if (nativeCreate) {
|
|
var result = data[key];
|
|
return result === HASH_UNDEFINED ? undefined : result;
|
|
}
|
|
return hasOwnProperty$1.call(data, key) ? data[key] : undefined;
|
|
}
|
|
|
|
/**
|
|
* Checks if a hash value for `key` exists.
|
|
*
|
|
* @private
|
|
* @name has
|
|
* @memberOf Hash
|
|
* @param {string} key The key of the entry to check.
|
|
* @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
|
|
*/
|
|
function hashHas(key) {
|
|
var data = this.__data__;
|
|
return nativeCreate ? data[key] !== undefined : hasOwnProperty$1.call(data, key);
|
|
}
|
|
|
|
/**
|
|
* Sets the hash `key` to `value`.
|
|
*
|
|
* @private
|
|
* @name set
|
|
* @memberOf Hash
|
|
* @param {string} key The key of the value to set.
|
|
* @param {*} value The value to set.
|
|
* @returns {Object} Returns the hash instance.
|
|
*/
|
|
function hashSet(key, value) {
|
|
var data = this.__data__;
|
|
data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value;
|
|
return this;
|
|
}
|
|
|
|
// Add methods to `Hash`.
|
|
Hash.prototype.clear = hashClear;
|
|
Hash.prototype['delete'] = hashDelete;
|
|
Hash.prototype.get = hashGet;
|
|
Hash.prototype.has = hashHas;
|
|
Hash.prototype.set = hashSet;
|
|
|
|
/**
|
|
* Creates an list cache object.
|
|
*
|
|
* @private
|
|
* @constructor
|
|
* @param {Array} [entries] The key-value pairs to cache.
|
|
*/
|
|
function ListCache(entries) {
|
|
var index = -1,
|
|
length = entries ? entries.length : 0;
|
|
|
|
this.clear();
|
|
while (++index < length) {
|
|
var entry = entries[index];
|
|
this.set(entry[0], entry[1]);
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Removes all key-value entries from the list cache.
|
|
*
|
|
* @private
|
|
* @name clear
|
|
* @memberOf ListCache
|
|
*/
|
|
function listCacheClear() {
|
|
this.__data__ = [];
|
|
}
|
|
|
|
/**
|
|
* Removes `key` and its value from the list cache.
|
|
*
|
|
* @private
|
|
* @name delete
|
|
* @memberOf ListCache
|
|
* @param {string} key The key of the value to remove.
|
|
* @returns {boolean} Returns `true` if the entry was removed, else `false`.
|
|
*/
|
|
function listCacheDelete(key) {
|
|
var data = this.__data__,
|
|
index = assocIndexOf(data, key);
|
|
|
|
if (index < 0) {
|
|
return false;
|
|
}
|
|
var lastIndex = data.length - 1;
|
|
if (index == lastIndex) {
|
|
data.pop();
|
|
} else {
|
|
splice.call(data, index, 1);
|
|
}
|
|
return true;
|
|
}
|
|
|
|
/**
|
|
* Gets the list cache value for `key`.
|
|
*
|
|
* @private
|
|
* @name get
|
|
* @memberOf ListCache
|
|
* @param {string} key The key of the value to get.
|
|
* @returns {*} Returns the entry value.
|
|
*/
|
|
function listCacheGet(key) {
|
|
var data = this.__data__,
|
|
index = assocIndexOf(data, key);
|
|
|
|
return index < 0 ? undefined : data[index][1];
|
|
}
|
|
|
|
/**
|
|
* Checks if a list cache value for `key` exists.
|
|
*
|
|
* @private
|
|
* @name has
|
|
* @memberOf ListCache
|
|
* @param {string} key The key of the entry to check.
|
|
* @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
|
|
*/
|
|
function listCacheHas(key) {
|
|
return assocIndexOf(this.__data__, key) > -1;
|
|
}
|
|
|
|
/**
|
|
* Sets the list cache `key` to `value`.
|
|
*
|
|
* @private
|
|
* @name set
|
|
* @memberOf ListCache
|
|
* @param {string} key The key of the value to set.
|
|
* @param {*} value The value to set.
|
|
* @returns {Object} Returns the list cache instance.
|
|
*/
|
|
function listCacheSet(key, value) {
|
|
var data = this.__data__,
|
|
index = assocIndexOf(data, key);
|
|
|
|
if (index < 0) {
|
|
data.push([key, value]);
|
|
} else {
|
|
data[index][1] = value;
|
|
}
|
|
return this;
|
|
}
|
|
|
|
// Add methods to `ListCache`.
|
|
ListCache.prototype.clear = listCacheClear;
|
|
ListCache.prototype['delete'] = listCacheDelete;
|
|
ListCache.prototype.get = listCacheGet;
|
|
ListCache.prototype.has = listCacheHas;
|
|
ListCache.prototype.set = listCacheSet;
|
|
|
|
/**
|
|
* Creates a map cache object to store key-value pairs.
|
|
*
|
|
* @private
|
|
* @constructor
|
|
* @param {Array} [entries] The key-value pairs to cache.
|
|
*/
|
|
function MapCache(entries) {
|
|
var index = -1,
|
|
length = entries ? entries.length : 0;
|
|
|
|
this.clear();
|
|
while (++index < length) {
|
|
var entry = entries[index];
|
|
this.set(entry[0], entry[1]);
|
|
}
|
|
}
|
|
|
|
/**
|
|
* Removes all key-value entries from the map.
|
|
*
|
|
* @private
|
|
* @name clear
|
|
* @memberOf MapCache
|
|
*/
|
|
function mapCacheClear() {
|
|
this.__data__ = {
|
|
'hash': new Hash,
|
|
'map': new (Map || ListCache),
|
|
'string': new Hash
|
|
};
|
|
}
|
|
|
|
/**
|
|
* Removes `key` and its value from the map.
|
|
*
|
|
* @private
|
|
* @name delete
|
|
* @memberOf MapCache
|
|
* @param {string} key The key of the value to remove.
|
|
* @returns {boolean} Returns `true` if the entry was removed, else `false`.
|
|
*/
|
|
function mapCacheDelete(key) {
|
|
return getMapData(this, key)['delete'](key);
|
|
}
|
|
|
|
/**
|
|
* Gets the map value for `key`.
|
|
*
|
|
* @private
|
|
* @name get
|
|
* @memberOf MapCache
|
|
* @param {string} key The key of the value to get.
|
|
* @returns {*} Returns the entry value.
|
|
*/
|
|
function mapCacheGet(key) {
|
|
return getMapData(this, key).get(key);
|
|
}
|
|
|
|
/**
|
|
* Checks if a map value for `key` exists.
|
|
*
|
|
* @private
|
|
* @name has
|
|
* @memberOf MapCache
|
|
* @param {string} key The key of the entry to check.
|
|
* @returns {boolean} Returns `true` if an entry for `key` exists, else `false`.
|
|
*/
|
|
function mapCacheHas(key) {
|
|
return getMapData(this, key).has(key);
|
|
}
|
|
|
|
/**
|
|
* Sets the map `key` to `value`.
|
|
*
|
|
* @private
|
|
* @name set
|
|
* @memberOf MapCache
|
|
* @param {string} key The key of the value to set.
|
|
* @param {*} value The value to set.
|
|
* @returns {Object} Returns the map cache instance.
|
|
*/
|
|
function mapCacheSet(key, value) {
|
|
getMapData(this, key).set(key, value);
|
|
return this;
|
|
}
|
|
|
|
// Add methods to `MapCache`.
|
|
MapCache.prototype.clear = mapCacheClear;
|
|
MapCache.prototype['delete'] = mapCacheDelete;
|
|
MapCache.prototype.get = mapCacheGet;
|
|
MapCache.prototype.has = mapCacheHas;
|
|
MapCache.prototype.set = mapCacheSet;
|
|
|
|
/**
|
|
* Gets the index at which the `key` is found in `array` of key-value pairs.
|
|
*
|
|
* @private
|
|
* @param {Array} array The array to inspect.
|
|
* @param {*} key The key to search for.
|
|
* @returns {number} Returns the index of the matched value, else `-1`.
|
|
*/
|
|
function assocIndexOf(array, key) {
|
|
var length = array.length;
|
|
while (length--) {
|
|
if (eq(array[length][0], key)) {
|
|
return length;
|
|
}
|
|
}
|
|
return -1;
|
|
}
|
|
|
|
/**
|
|
* The base implementation of `_.get` without support for default values.
|
|
*
|
|
* @private
|
|
* @param {Object} object The object to query.
|
|
* @param {Array|string} path The path of the property to get.
|
|
* @returns {*} Returns the resolved value.
|
|
*/
|
|
function baseGet(object, path) {
|
|
path = isKey(path, object) ? [path] : castPath(path);
|
|
|
|
var index = 0,
|
|
length = path.length;
|
|
|
|
while (object != null && index < length) {
|
|
object = object[toKey(path[index++])];
|
|
}
|
|
return (index && index == length) ? object : undefined;
|
|
}
|
|
|
|
/**
|
|
* The base implementation of `_.isNative` without bad shim checks.
|
|
*
|
|
* @private
|
|
* @param {*} value The value to check.
|
|
* @returns {boolean} Returns `true` if `value` is a native function,
|
|
* else `false`.
|
|
*/
|
|
function baseIsNative(value) {
|
|
if (!isObject$1(value) || isMasked(value)) {
|
|
return false;
|
|
}
|
|
var pattern = (isFunction$1(value) || isHostObject(value)) ? reIsNative : reIsHostCtor;
|
|
return pattern.test(toSource(value));
|
|
}
|
|
|
|
/**
|
|
* The base implementation of `_.toString` which doesn't convert nullish
|
|
* values to empty strings.
|
|
*
|
|
* @private
|
|
* @param {*} value The value to process.
|
|
* @returns {string} Returns the string.
|
|
*/
|
|
function baseToString(value) {
|
|
// Exit early for strings to avoid a performance hit in some environments.
|
|
if (typeof value == 'string') {
|
|
return value;
|
|
}
|
|
if (isSymbol(value)) {
|
|
return symbolToString ? symbolToString.call(value) : '';
|
|
}
|
|
var result = (value + '');
|
|
return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result;
|
|
}
|
|
|
|
/**
|
|
* Casts `value` to a path array if it's not one.
|
|
*
|
|
* @private
|
|
* @param {*} value The value to inspect.
|
|
* @returns {Array} Returns the cast property path array.
|
|
*/
|
|
function castPath(value) {
|
|
return isArray$2(value) ? value : stringToPath(value);
|
|
}
|
|
|
|
/**
|
|
* Gets the data for `map`.
|
|
*
|
|
* @private
|
|
* @param {Object} map The map to query.
|
|
* @param {string} key The reference key.
|
|
* @returns {*} Returns the map data.
|
|
*/
|
|
function getMapData(map, key) {
|
|
var data = map.__data__;
|
|
return isKeyable(key)
|
|
? data[typeof key == 'string' ? 'string' : 'hash']
|
|
: data.map;
|
|
}
|
|
|
|
/**
|
|
* Gets the native function at `key` of `object`.
|
|
*
|
|
* @private
|
|
* @param {Object} object The object to query.
|
|
* @param {string} key The key of the method to get.
|
|
* @returns {*} Returns the function if it's native, else `undefined`.
|
|
*/
|
|
function getNative(object, key) {
|
|
var value = getValue(object, key);
|
|
return baseIsNative(value) ? value : undefined;
|
|
}
|
|
|
|
/**
|
|
* Checks if `value` is a property name and not a property path.
|
|
*
|
|
* @private
|
|
* @param {*} value The value to check.
|
|
* @param {Object} [object] The object to query keys on.
|
|
* @returns {boolean} Returns `true` if `value` is a property name, else `false`.
|
|
*/
|
|
function isKey(value, object) {
|
|
if (isArray$2(value)) {
|
|
return false;
|
|
}
|
|
var type = typeof value;
|
|
if (type == 'number' || type == 'symbol' || type == 'boolean' ||
|
|
value == null || isSymbol(value)) {
|
|
return true;
|
|
}
|
|
return reIsPlainProp.test(value) || !reIsDeepProp.test(value) ||
|
|
(object != null && value in Object(object));
|
|
}
|
|
|
|
/**
|
|
* Checks if `value` is suitable for use as unique object key.
|
|
*
|
|
* @private
|
|
* @param {*} value The value to check.
|
|
* @returns {boolean} Returns `true` if `value` is suitable, else `false`.
|
|
*/
|
|
function isKeyable(value) {
|
|
var type = typeof value;
|
|
return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean')
|
|
? (value !== '__proto__')
|
|
: (value === null);
|
|
}
|
|
|
|
/**
|
|
* Checks if `func` has its source masked.
|
|
*
|
|
* @private
|
|
* @param {Function} func The function to check.
|
|
* @returns {boolean} Returns `true` if `func` is masked, else `false`.
|
|
*/
|
|
function isMasked(func) {
|
|
return !!maskSrcKey && (maskSrcKey in func);
|
|
}
|
|
|
|
/**
|
|
* Converts `string` to a property path array.
|
|
*
|
|
* @private
|
|
* @param {string} string The string to convert.
|
|
* @returns {Array} Returns the property path array.
|
|
*/
|
|
var stringToPath = memoize(function(string) {
|
|
string = toString$1(string);
|
|
|
|
var result = [];
|
|
if (reLeadingDot.test(string)) {
|
|
result.push('');
|
|
}
|
|
string.replace(rePropName, function(match, number, quote, string) {
|
|
result.push(quote ? string.replace(reEscapeChar, '$1') : (number || match));
|
|
});
|
|
return result;
|
|
});
|
|
|
|
/**
|
|
* Converts `value` to a string key if it's not a string or symbol.
|
|
*
|
|
* @private
|
|
* @param {*} value The value to inspect.
|
|
* @returns {string|symbol} Returns the key.
|
|
*/
|
|
function toKey(value) {
|
|
if (typeof value == 'string' || isSymbol(value)) {
|
|
return value;
|
|
}
|
|
var result = (value + '');
|
|
return (result == '0' && (1 / value) == -INFINITY) ? '-0' : result;
|
|
}
|
|
|
|
/**
|
|
* Converts `func` to its source code.
|
|
*
|
|
* @private
|
|
* @param {Function} func The function to process.
|
|
* @returns {string} Returns the source code.
|
|
*/
|
|
function toSource(func) {
|
|
if (func != null) {
|
|
try {
|
|
return funcToString.call(func);
|
|
} catch (e) {}
|
|
try {
|
|
return (func + '');
|
|
} catch (e) {}
|
|
}
|
|
return '';
|
|
}
|
|
|
|
/**
|
|
* Creates a function that memoizes the result of `func`. If `resolver` is
|
|
* provided, it determines the cache key for storing the result based on the
|
|
* arguments provided to the memoized function. By default, the first argument
|
|
* provided to the memoized function is used as the map cache key. The `func`
|
|
* is invoked with the `this` binding of the memoized function.
|
|
*
|
|
* **Note:** The cache is exposed as the `cache` property on the memoized
|
|
* function. Its creation may be customized by replacing the `_.memoize.Cache`
|
|
* constructor with one whose instances implement the
|
|
* [`Map`](http://ecma-international.org/ecma-262/7.0/#sec-properties-of-the-map-prototype-object)
|
|
* method interface of `delete`, `get`, `has`, and `set`.
|
|
*
|
|
* @static
|
|
* @memberOf _
|
|
* @since 0.1.0
|
|
* @category Function
|
|
* @param {Function} func The function to have its output memoized.
|
|
* @param {Function} [resolver] The function to resolve the cache key.
|
|
* @returns {Function} Returns the new memoized function.
|
|
* @example
|
|
*
|
|
* var object = { 'a': 1, 'b': 2 };
|
|
* var other = { 'c': 3, 'd': 4 };
|
|
*
|
|
* var values = _.memoize(_.values);
|
|
* values(object);
|
|
* // => [1, 2]
|
|
*
|
|
* values(other);
|
|
* // => [3, 4]
|
|
*
|
|
* object.a = 2;
|
|
* values(object);
|
|
* // => [1, 2]
|
|
*
|
|
* // Modify the result cache.
|
|
* values.cache.set(object, ['a', 'b']);
|
|
* values(object);
|
|
* // => ['a', 'b']
|
|
*
|
|
* // Replace `_.memoize.Cache`.
|
|
* _.memoize.Cache = WeakMap;
|
|
*/
|
|
function memoize(func, resolver) {
|
|
if (typeof func != 'function' || (resolver && typeof resolver != 'function')) {
|
|
throw new TypeError(FUNC_ERROR_TEXT);
|
|
}
|
|
var memoized = function() {
|
|
var args = arguments,
|
|
key = resolver ? resolver.apply(this, args) : args[0],
|
|
cache = memoized.cache;
|
|
|
|
if (cache.has(key)) {
|
|
return cache.get(key);
|
|
}
|
|
var result = func.apply(this, args);
|
|
memoized.cache = cache.set(key, result);
|
|
return result;
|
|
};
|
|
memoized.cache = new (memoize.Cache || MapCache);
|
|
return memoized;
|
|
}
|
|
|
|
// Assign cache to `_.memoize`.
|
|
memoize.Cache = MapCache;
|
|
|
|
/**
|
|
* Performs a
|
|
* [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero)
|
|
* comparison between two values to determine if they are equivalent.
|
|
*
|
|
* @static
|
|
* @memberOf _
|
|
* @since 4.0.0
|
|
* @category Lang
|
|
* @param {*} value The value to compare.
|
|
* @param {*} other The other value to compare.
|
|
* @returns {boolean} Returns `true` if the values are equivalent, else `false`.
|
|
* @example
|
|
*
|
|
* var object = { 'a': 1 };
|
|
* var other = { 'a': 1 };
|
|
*
|
|
* _.eq(object, object);
|
|
* // => true
|
|
*
|
|
* _.eq(object, other);
|
|
* // => false
|
|
*
|
|
* _.eq('a', 'a');
|
|
* // => true
|
|
*
|
|
* _.eq('a', Object('a'));
|
|
* // => false
|
|
*
|
|
* _.eq(NaN, NaN);
|
|
* // => true
|
|
*/
|
|
function eq(value, other) {
|
|
return value === other || (value !== value && other !== other);
|
|
}
|
|
|
|
/**
|
|
* Checks if `value` is classified as an `Array` object.
|
|
*
|
|
* @static
|
|
* @memberOf _
|
|
* @since 0.1.0
|
|
* @category Lang
|
|
* @param {*} value The value to check.
|
|
* @returns {boolean} Returns `true` if `value` is an array, else `false`.
|
|
* @example
|
|
*
|
|
* _.isArray([1, 2, 3]);
|
|
* // => true
|
|
*
|
|
* _.isArray(document.body.children);
|
|
* // => false
|
|
*
|
|
* _.isArray('abc');
|
|
* // => false
|
|
*
|
|
* _.isArray(_.noop);
|
|
* // => false
|
|
*/
|
|
var isArray$2 = Array.isArray;
|
|
|
|
/**
|
|
* Checks if `value` is classified as a `Function` object.
|
|
*
|
|
* @static
|
|
* @memberOf _
|
|
* @since 0.1.0
|
|
* @category Lang
|
|
* @param {*} value The value to check.
|
|
* @returns {boolean} Returns `true` if `value` is a function, else `false`.
|
|
* @example
|
|
*
|
|
* _.isFunction(_);
|
|
* // => true
|
|
*
|
|
* _.isFunction(/abc/);
|
|
* // => false
|
|
*/
|
|
function isFunction$1(value) {
|
|
// The use of `Object#toString` avoids issues with the `typeof` operator
|
|
// in Safari 8-9 which returns 'object' for typed array and other constructors.
|
|
var tag = isObject$1(value) ? objectToString$1.call(value) : '';
|
|
return tag == funcTag || tag == genTag;
|
|
}
|
|
|
|
/**
|
|
* Checks if `value` is the
|
|
* [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types)
|
|
* of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`)
|
|
*
|
|
* @static
|
|
* @memberOf _
|
|
* @since 0.1.0
|
|
* @category Lang
|
|
* @param {*} value The value to check.
|
|
* @returns {boolean} Returns `true` if `value` is an object, else `false`.
|
|
* @example
|
|
*
|
|
* _.isObject({});
|
|
* // => true
|
|
*
|
|
* _.isObject([1, 2, 3]);
|
|
* // => true
|
|
*
|
|
* _.isObject(_.noop);
|
|
* // => true
|
|
*
|
|
* _.isObject(null);
|
|
* // => false
|
|
*/
|
|
function isObject$1(value) {
|
|
var type = typeof value;
|
|
return !!value && (type == 'object' || type == 'function');
|
|
}
|
|
|
|
/**
|
|
* Checks if `value` is object-like. A value is object-like if it's not `null`
|
|
* and has a `typeof` result of "object".
|
|
*
|
|
* @static
|
|
* @memberOf _
|
|
* @since 4.0.0
|
|
* @category Lang
|
|
* @param {*} value The value to check.
|
|
* @returns {boolean} Returns `true` if `value` is object-like, else `false`.
|
|
* @example
|
|
*
|
|
* _.isObjectLike({});
|
|
* // => true
|
|
*
|
|
* _.isObjectLike([1, 2, 3]);
|
|
* // => true
|
|
*
|
|
* _.isObjectLike(_.noop);
|
|
* // => false
|
|
*
|
|
* _.isObjectLike(null);
|
|
* // => false
|
|
*/
|
|
function isObjectLike(value) {
|
|
return !!value && typeof value == 'object';
|
|
}
|
|
|
|
/**
|
|
* Checks if `value` is classified as a `Symbol` primitive or object.
|
|
*
|
|
* @static
|
|
* @memberOf _
|
|
* @since 4.0.0
|
|
* @category Lang
|
|
* @param {*} value The value to check.
|
|
* @returns {boolean} Returns `true` if `value` is a symbol, else `false`.
|
|
* @example
|
|
*
|
|
* _.isSymbol(Symbol.iterator);
|
|
* // => true
|
|
*
|
|
* _.isSymbol('abc');
|
|
* // => false
|
|
*/
|
|
function isSymbol(value) {
|
|
return typeof value == 'symbol' ||
|
|
(isObjectLike(value) && objectToString$1.call(value) == symbolTag);
|
|
}
|
|
|
|
/**
|
|
* Converts `value` to a string. An empty string is returned for `null`
|
|
* and `undefined` values. The sign of `-0` is preserved.
|
|
*
|
|
* @static
|
|
* @memberOf _
|
|
* @since 4.0.0
|
|
* @category Lang
|
|
* @param {*} value The value to process.
|
|
* @returns {string} Returns the string.
|
|
* @example
|
|
*
|
|
* _.toString(null);
|
|
* // => ''
|
|
*
|
|
* _.toString(-0);
|
|
* // => '-0'
|
|
*
|
|
* _.toString([1, 2, 3]);
|
|
* // => '1,2,3'
|
|
*/
|
|
function toString$1(value) {
|
|
return value == null ? '' : baseToString(value);
|
|
}
|
|
|
|
/**
|
|
* Gets the value at `path` of `object`. If the resolved value is
|
|
* `undefined`, the `defaultValue` is returned in its place.
|
|
*
|
|
* @static
|
|
* @memberOf _
|
|
* @since 3.7.0
|
|
* @category Object
|
|
* @param {Object} object The object to query.
|
|
* @param {Array|string} path The path of the property to get.
|
|
* @param {*} [defaultValue] The value returned for `undefined` resolved values.
|
|
* @returns {*} Returns the resolved value.
|
|
* @example
|
|
*
|
|
* var object = { 'a': [{ 'b': { 'c': 3 } }] };
|
|
*
|
|
* _.get(object, 'a[0].b.c');
|
|
* // => 3
|
|
*
|
|
* _.get(object, ['a', '0', 'b', 'c']);
|
|
* // => 3
|
|
*
|
|
* _.get(object, 'a.b.c', 'default');
|
|
* // => 'default'
|
|
*/
|
|
function get(object, path, defaultValue) {
|
|
var result = object == null ? undefined : baseGet(object, path);
|
|
return result === undefined ? defaultValue : result;
|
|
}
|
|
|
|
var lodash_get = get;
|
|
|
|
function getProp(obj, path, defaultValue) {
|
|
return obj[path] === undefined ? defaultValue : obj[path];
|
|
}
|
|
|
|
function setProp(obj, path, value) {
|
|
var pathArray = Array.isArray(path) ? path : path.split('.');
|
|
|
|
var _pathArray = _toArray(pathArray),
|
|
key = _pathArray[0],
|
|
restPath = _pathArray.slice(1);
|
|
|
|
var newValue = pathArray.length > 1 ? setProp(obj[key] || {}, restPath, value) : value;
|
|
return Object.assign({}, obj, _defineProperty({}, key, newValue));
|
|
}
|
|
|
|
function unsetProp(obj, path) {
|
|
var pathArray = Array.isArray(path) ? path : path.split('.');
|
|
|
|
var _pathArray2 = _toArray(pathArray),
|
|
key = _pathArray2[0],
|
|
restPath = _pathArray2.slice(1);
|
|
|
|
if (_typeof(obj[key]) !== 'object') {
|
|
// This will never be hit in the current code because unwind does the check before calling unsetProp
|
|
|
|
/* istanbul ignore next */
|
|
return obj;
|
|
}
|
|
|
|
if (pathArray.length === 1) {
|
|
return Object.keys(obj).filter(function (prop) {
|
|
return prop !== key;
|
|
}).reduce(function (acc, prop) {
|
|
return Object.assign(acc, _defineProperty({}, prop, obj[prop]));
|
|
}, {});
|
|
}
|
|
|
|
return unsetProp(obj[key], restPath);
|
|
}
|
|
|
|
function flattenReducer(acc, arr) {
|
|
try {
|
|
// This is faster but susceptible to `RangeError: Maximum call stack size exceeded`
|
|
acc.push.apply(acc, _toConsumableArray(arr));
|
|
return acc;
|
|
} catch (err) {
|
|
// Fallback to a slower but safer option
|
|
return acc.concat(arr);
|
|
}
|
|
}
|
|
|
|
function fastJoin(arr, separator) {
|
|
var isFirst = true;
|
|
return arr.reduce(function (acc, elem) {
|
|
if (elem === null || elem === undefined) {
|
|
elem = '';
|
|
}
|
|
|
|
if (isFirst) {
|
|
isFirst = false;
|
|
return "".concat(elem);
|
|
}
|
|
|
|
return "".concat(acc).concat(separator).concat(elem);
|
|
}, '');
|
|
}
|
|
|
|
var utils = {
|
|
getProp: getProp,
|
|
setProp: setProp,
|
|
unsetProp: unsetProp,
|
|
fastJoin: fastJoin,
|
|
flattenReducer: flattenReducer
|
|
};
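// A minimal sketch of how the helpers above behave, using a hypothetical
// `row` object (illustrative only, kept as a comment):
/*
var row = { name: 'Ada', details: { city: 'London' } };
setProp(row, 'details.country', 'UK');
// => { name: 'Ada', details: { city: 'London', country: 'UK' } } (a new object; `row` is untouched)
getProp(row, 'name', 'N/A');     // => 'Ada' (direct property access only, no dotted paths)
fastJoin(['a', null, 'c'], ','); // => 'a,,c' (null/undefined become empty strings)
*/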
|
|
|
|
var getProp$1 = utils.getProp,
|
|
fastJoin$1 = utils.fastJoin,
|
|
flattenReducer$1 = utils.flattenReducer;
|
|
|
|
var JSON2CSVBase =
|
|
/*#__PURE__*/
|
|
function () {
|
|
function JSON2CSVBase(opts) {
|
|
_classCallCheck(this, JSON2CSVBase);
|
|
|
|
this.opts = this.preprocessOpts(opts);
|
|
}
|
|
/**
|
|
* Check the passed opts and set defaults.
|
|
*
|
|
* @param {Json2CsvOptions} opts Options object containing fields,
|
|
* delimiter, default value, quote mark, header, etc.
|
|
*/
|
|
|
|
|
|
_createClass(JSON2CSVBase, [{
|
|
key: "preprocessOpts",
|
|
value: function preprocessOpts(opts) {
|
|
var processedOpts = Object.assign({}, opts);
|
|
processedOpts.transforms = !Array.isArray(processedOpts.transforms) ? processedOpts.transforms ? [processedOpts.transforms] : [] : processedOpts.transforms;
|
|
processedOpts.delimiter = processedOpts.delimiter || ',';
|
|
processedOpts.eol = processedOpts.eol || os.EOL;
|
|
processedOpts.quote = typeof processedOpts.quote === 'string' ? processedOpts.quote : '"';
|
|
processedOpts.escapedQuote = typeof processedOpts.escapedQuote === 'string' ? processedOpts.escapedQuote : "".concat(processedOpts.quote).concat(processedOpts.quote);
|
|
processedOpts.header = processedOpts.header !== false;
|
|
processedOpts.includeEmptyRows = processedOpts.includeEmptyRows || false;
|
|
processedOpts.withBOM = processedOpts.withBOM || false;
|
|
return processedOpts;
|
|
}
|
|
/**
|
|
* Check and normalize the fields configuration.
|
|
*
|
|
* @param {(string|object)[]} fields Fields configuration provided by the user
|
|
* or inferred from the data
|
|
* @returns {object[]} preprocessed FieldsInfo array
|
|
*/
|
|
|
|
}, {
|
|
key: "preprocessFieldsInfo",
|
|
value: function preprocessFieldsInfo(fields) {
|
|
var _this = this;
|
|
|
|
return fields.map(function (fieldInfo) {
|
|
if (typeof fieldInfo === 'string') {
|
|
return {
|
|
label: fieldInfo,
|
|
value: fieldInfo.includes('.') || fieldInfo.includes('[') ? function (row) {
|
|
return lodash_get(row, fieldInfo, _this.opts.defaultValue);
|
|
} : function (row) {
|
|
return getProp$1(row, fieldInfo, _this.opts.defaultValue);
|
|
}
|
|
};
|
|
}
|
|
|
|
if (_typeof(fieldInfo) === 'object') {
|
|
var defaultValue = 'default' in fieldInfo ? fieldInfo.default : _this.opts.defaultValue;
|
|
|
|
if (typeof fieldInfo.value === 'string') {
|
|
return {
|
|
label: fieldInfo.label || fieldInfo.value,
|
|
value: fieldInfo.value.includes('.') || fieldInfo.value.includes('[') ? function (row) {
|
|
return lodash_get(row, fieldInfo.value, defaultValue);
|
|
} : function (row) {
|
|
return getProp$1(row, fieldInfo.value, defaultValue);
|
|
}
|
|
};
|
|
}
|
|
|
|
if (typeof fieldInfo.value === 'function') {
|
|
var label = fieldInfo.label || fieldInfo.value.name || '';
|
|
var field = {
|
|
label: label,
|
|
default: defaultValue
|
|
};
|
|
return {
|
|
label: label,
|
|
value: function value(row) {
|
|
var value = fieldInfo.value(row, field);
|
|
return value === null || value === undefined ? defaultValue : value;
|
|
}
|
|
};
|
|
}
|
|
}
|
|
|
|
throw new Error('Invalid field info option. ' + JSON.stringify(fieldInfo));
|
|
});
|
|
}
|
|
/**
|
|
* Create the title row with all the provided fields as column headings
|
|
*
|
|
* @returns {String} titles as a string
|
|
*/
|
|
|
|
}, {
|
|
key: "getHeader",
|
|
value: function getHeader() {
|
|
var _this2 = this;
|
|
|
|
return fastJoin$1(this.opts.fields.map(function (fieldInfo) {
|
|
return _this2.processValue(fieldInfo.label);
|
|
}), this.opts.delimiter);
|
|
}
|
|
/**
|
|
* Preprocess each object according to the given transforms (unwind, flatten, etc.).
|
|
* @param {Object} row JSON object to be converted into a CSV row
|
|
*/
|
|
|
|
}, {
|
|
key: "preprocessRow",
|
|
value: function preprocessRow(row) {
|
|
return this.opts.transforms.reduce(function (rows, transform) {
|
|
return rows.map(function (row) {
|
|
return transform(row);
|
|
}).reduce(flattenReducer$1, []);
|
|
}, [row]);
|
|
}
|
|
/**
|
|
* Create the content of a specific CSV row
|
|
*
|
|
* @param {Object} row JSON object to be converted into a CSV row
|
|
* @returns {String} CSV string (row)
|
|
*/
|
|
|
|
}, {
|
|
key: "processRow",
|
|
value: function processRow(row) {
|
|
var _this3 = this;
|
|
|
|
if (!row) {
|
|
return undefined;
|
|
}
|
|
|
|
var processedRow = this.opts.fields.map(function (fieldInfo) {
|
|
return _this3.processCell(row, fieldInfo);
|
|
});
|
|
|
|
if (!this.opts.includeEmptyRows && processedRow.every(function (field) {
|
|
return field === undefined;
|
|
})) {
|
|
return undefined;
|
|
}
|
|
|
|
return fastJoin$1(processedRow, this.opts.delimiter);
|
|
}
|
|
/**
|
|
* Create the content of a specific CSV row cell
|
|
*
|
|
* @param {Object} row JSON object representing the CSV row that the cell belongs to
|
|
* @param {FieldInfo} fieldInfo Details of the field to process to be a CSV cell
|
|
* @returns {String} CSV string (cell)
|
|
*/
|
|
|
|
}, {
|
|
key: "processCell",
|
|
value: function processCell(row, fieldInfo) {
|
|
return this.processValue(fieldInfo.value(row));
|
|
}
|
|
/**
|
|
* Create the content of a specific CSV row cell
|
|
*
|
|
* @param {Any} value Value to be included in a CSV cell
|
|
* @returns {String} Value stringified and processed
|
|
*/
|
|
|
|
}, {
|
|
key: "processValue",
|
|
value: function processValue(value) {
|
|
if (value === null || value === undefined) {
|
|
return undefined;
|
|
}
|
|
|
|
var valueType = _typeof(value);
|
|
|
|
if (valueType !== 'boolean' && valueType !== 'number' && valueType !== 'string') {
|
|
value = JSON.stringify(value);
|
|
|
|
if (value === undefined) {
|
|
return undefined;
|
|
}
|
|
|
|
if (value[0] === '"') {
|
|
value = value.replace(/^"(.+)"$/, '$1');
|
|
}
|
|
}
|
|
|
|
if (typeof value === 'string') {
|
|
if (value.includes(this.opts.quote)) {
|
|
value = value.replace(new RegExp(this.opts.quote, 'g'), this.opts.escapedQuote);
|
|
}
|
|
|
|
value = "".concat(this.opts.quote).concat(value).concat(this.opts.quote);
|
|
|
|
if (this.opts.excelStrings) {
|
|
value = "\"=\"".concat(value, "\"\"");
|
|
}
|
|
}
|
|
|
|
return value;
|
|
}
|
|
}]);
|
|
|
|
return JSON2CSVBase;
|
|
}();
|
|
|
|
var JSON2CSVBase_1 = JSON2CSVBase;
|
|
|
|
var fastJoin$2 = utils.fastJoin,
|
|
flattenReducer$2 = utils.flattenReducer;
|
|
|
|
var JSON2CSVParser =
|
|
/*#__PURE__*/
|
|
function (_JSON2CSVBase) {
|
|
_inherits(JSON2CSVParser, _JSON2CSVBase);
|
|
|
|
function JSON2CSVParser(opts) {
|
|
var _this;
|
|
|
|
_classCallCheck(this, JSON2CSVParser);
|
|
|
|
_this = _possibleConstructorReturn(this, _getPrototypeOf(JSON2CSVParser).call(this, opts));
|
|
|
|
if (_this.opts.fields) {
|
|
_this.opts.fields = _this.preprocessFieldsInfo(_this.opts.fields);
|
|
}
|
|
|
|
return _this;
|
|
}
|
|
/**
|
|
* Main function that converts json to csv.
|
|
*
|
|
* @param {Array|Object} data Array of JSON objects to be converted to CSV
|
|
* @returns {String} The CSV formatted data as a string
|
|
*/
|
|
|
|
|
|
_createClass(JSON2CSVParser, [{
|
|
key: "parse",
|
|
value: function parse(data) {
|
|
var processedData = this.preprocessData(data);
|
|
|
|
if (!this.opts.fields) {
|
|
this.opts.fields = processedData.reduce(function (fields, item) {
|
|
Object.keys(item).forEach(function (field) {
|
|
if (!fields.includes(field)) {
|
|
fields.push(field);
|
|
}
|
|
});
|
|
return fields;
|
|
}, []);
|
|
this.opts.fields = this.preprocessFieldsInfo(this.opts.fields);
|
|
}
|
|
|
|
var header = this.opts.header ? this.getHeader() : '';
|
|
var rows = this.processData(processedData);
|
|
var csv = (this.opts.withBOM ? "\uFEFF" : '') + header + (header && rows ? this.opts.eol : '') + rows;
|
|
return csv;
|
|
}
|
|
/**
|
|
* Preprocess the data according to the given opts (unwind, flatten, etc.)
* and calculate the fields and field names if they are not provided.
|
|
*
|
|
* @param {Array|Object} data Array or object to be converted to CSV
|
|
*/
|
|
|
|
}, {
|
|
key: "preprocessData",
|
|
value: function preprocessData(data) {
|
|
var _this2 = this;
|
|
|
|
var processedData = Array.isArray(data) ? data : [data];
|
|
|
|
if (!this.opts.fields && (processedData.length === 0 || _typeof(processedData[0]) !== 'object')) {
|
|
throw new Error('Data should not be empty or the "fields" option should be included');
|
|
}
|
|
|
|
if (this.opts.transforms.length === 0) return processedData;
|
|
return processedData.map(function (row) {
|
|
return _this2.preprocessRow(row);
|
|
}).reduce(flattenReducer$2, []);
|
|
}
|
|
/**
|
|
* Create the content row by row below the header
|
|
*
|
|
* @param {Array} data Array of JSON objects to be converted to CSV
|
|
* @returns {String} CSV string (body)
|
|
*/
|
|
|
|
}, {
|
|
key: "processData",
|
|
value: function processData(data) {
|
|
var _this3 = this;
|
|
|
|
return fastJoin$2(data.map(function (row) {
|
|
return _this3.processRow(row);
|
|
}).filter(function (row) {
|
|
return row;
|
|
}), // Filter empty rows
|
|
this.opts.eol);
|
|
}
|
|
}]);
|
|
|
|
return JSON2CSVParser;
|
|
}(JSON2CSVBase_1);
|
|
|
|
var JSON2CSVParser_1 = JSON2CSVParser;
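// Illustrative sketch of the synchronous parser defined above, using
// hypothetical data (example only, kept as a comment):
/*
var parser = new JSON2CSVParser_1({ fields: ['car', 'price'] });
var csv = parser.parse([
  { car: 'Audi', price: 40000 },
  { car: 'BMW', price: 35000 }
]);
// csv => '"car","price"' + eol + '"Audi",40000' + eol + '"BMW",35000'
// (strings are quoted, numbers are not; `eol` defaults to os.EOL)
*/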
|
|
|
|
/*global Buffer*/
|
|
// Named constants with unique integer values
|
|
var C = {};
|
|
// Tokens
|
|
var LEFT_BRACE = C.LEFT_BRACE = 0x1;
|
|
var RIGHT_BRACE = C.RIGHT_BRACE = 0x2;
|
|
var LEFT_BRACKET = C.LEFT_BRACKET = 0x3;
|
|
var RIGHT_BRACKET = C.RIGHT_BRACKET = 0x4;
|
|
var COLON = C.COLON = 0x5;
|
|
var COMMA = C.COMMA = 0x6;
|
|
var TRUE = C.TRUE = 0x7;
|
|
var FALSE = C.FALSE = 0x8;
|
|
var NULL = C.NULL = 0x9;
|
|
var STRING = C.STRING = 0xa;
|
|
var NUMBER = C.NUMBER = 0xb;
|
|
// Tokenizer States
|
|
var START = C.START = 0x11;
|
|
var STOP = C.STOP = 0x12;
|
|
var TRUE1 = C.TRUE1 = 0x21;
|
|
var TRUE2 = C.TRUE2 = 0x22;
|
|
var TRUE3 = C.TRUE3 = 0x23;
|
|
var FALSE1 = C.FALSE1 = 0x31;
|
|
var FALSE2 = C.FALSE2 = 0x32;
|
|
var FALSE3 = C.FALSE3 = 0x33;
|
|
var FALSE4 = C.FALSE4 = 0x34;
|
|
var NULL1 = C.NULL1 = 0x41;
|
|
var NULL2 = C.NULL2 = 0x42;
|
|
var NULL3 = C.NULL3 = 0x43;
|
|
var NUMBER1 = C.NUMBER1 = 0x51;
|
|
var NUMBER3 = C.NUMBER3 = 0x53;
|
|
var STRING1 = C.STRING1 = 0x61;
|
|
var STRING2 = C.STRING2 = 0x62;
|
|
var STRING3 = C.STRING3 = 0x63;
|
|
var STRING4 = C.STRING4 = 0x64;
|
|
var STRING5 = C.STRING5 = 0x65;
|
|
var STRING6 = C.STRING6 = 0x66;
|
|
// Parser States
|
|
var VALUE = C.VALUE = 0x71;
|
|
var KEY = C.KEY = 0x72;
|
|
// Parser Modes
|
|
var OBJECT = C.OBJECT = 0x81;
|
|
var ARRAY = C.ARRAY = 0x82;
|
|
// Character constants
|
|
var BACK_SLASH = "\\".charCodeAt(0);
|
|
var FORWARD_SLASH = "\/".charCodeAt(0);
|
|
var BACKSPACE = "\b".charCodeAt(0);
|
|
var FORM_FEED = "\f".charCodeAt(0);
|
|
var NEWLINE = "\n".charCodeAt(0);
|
|
var CARRIAGE_RETURN = "\r".charCodeAt(0);
|
|
var TAB = "\t".charCodeAt(0);
|
|
|
|
var STRING_BUFFER_SIZE = 64 * 1024;
|
|
|
|
function Parser() {
|
|
this.tState = START;
|
|
this.value = undefined;
|
|
|
|
this.string = undefined; // string data
|
|
this.stringBuffer = Buffer.alloc ? Buffer.alloc(STRING_BUFFER_SIZE) : new Buffer(STRING_BUFFER_SIZE);
|
|
this.stringBufferOffset = 0;
|
|
this.unicode = undefined; // unicode escapes
|
|
this.highSurrogate = undefined;
|
|
|
|
this.key = undefined;
|
|
this.mode = undefined;
|
|
this.stack = [];
|
|
this.state = VALUE;
|
|
this.bytes_remaining = 0; // number of bytes remaining in multi byte utf8 char to read after split boundary
|
|
this.bytes_in_sequence = 0; // bytes in multi byte utf8 char to read
|
|
this.temp_buffs = { "2": new Buffer(2), "3": new Buffer(3), "4": new Buffer(4) }; // for rebuilding chars split before boundary is reached
|
|
|
|
// Stream offset
|
|
this.offset = -1;
|
|
}
|
|
|
|
// Slow code to string converter (only used when throwing syntax errors)
|
|
Parser.toknam = function (code) {
|
|
var keys = Object.keys(C);
|
|
for (var i = 0, l = keys.length; i < l; i++) {
|
|
var key = keys[i];
|
|
if (C[key] === code) { return key; }
|
|
}
|
|
return code && ("0x" + code.toString(16));
|
|
};
|
|
|
|
var proto = Parser.prototype;
|
|
proto.onError = function (err) { throw err; };
|
|
proto.charError = function (buffer, i) {
|
|
this.tState = STOP;
|
|
this.onError(new Error("Unexpected " + JSON.stringify(String.fromCharCode(buffer[i])) + " at position " + i + " in state " + Parser.toknam(this.tState)));
|
|
};
|
|
proto.appendStringChar = function (char) {
|
|
if (this.stringBufferOffset >= STRING_BUFFER_SIZE) {
|
|
this.string += this.stringBuffer.toString('utf8');
|
|
this.stringBufferOffset = 0;
|
|
}
|
|
|
|
this.stringBuffer[this.stringBufferOffset++] = char;
|
|
};
|
|
proto.appendStringBuf = function (buf, start, end) {
|
|
var size = buf.length;
|
|
if (typeof start === 'number') {
|
|
if (typeof end === 'number') {
|
|
if (end < 0) {
|
|
// adding a negative end decreases the size
|
|
size = buf.length - start + end;
|
|
} else {
|
|
size = end - start;
|
|
}
|
|
} else {
|
|
size = buf.length - start;
|
|
}
|
|
}
|
|
|
|
if (size < 0) {
|
|
size = 0;
|
|
}
|
|
|
|
if (this.stringBufferOffset + size > STRING_BUFFER_SIZE) {
|
|
this.string += this.stringBuffer.toString('utf8', 0, this.stringBufferOffset);
|
|
this.stringBufferOffset = 0;
|
|
}
|
|
|
|
buf.copy(this.stringBuffer, this.stringBufferOffset, start, end);
|
|
this.stringBufferOffset += size;
|
|
};
|
|
proto.write = function (buffer) {
|
|
if (typeof buffer === "string") buffer = new Buffer(buffer);
|
|
var n;
|
|
for (var i = 0, l = buffer.length; i < l; i++) {
|
|
if (this.tState === START){
|
|
n = buffer[i];
|
|
this.offset++;
|
|
if(n === 0x7b){ this.onToken(LEFT_BRACE, "{"); // {
|
|
}else if(n === 0x7d){ this.onToken(RIGHT_BRACE, "}"); // }
|
|
}else if(n === 0x5b){ this.onToken(LEFT_BRACKET, "["); // [
|
|
}else if(n === 0x5d){ this.onToken(RIGHT_BRACKET, "]"); // ]
|
|
}else if(n === 0x3a){ this.onToken(COLON, ":"); // :
|
|
}else if(n === 0x2c){ this.onToken(COMMA, ","); // ,
|
|
}else if(n === 0x74){ this.tState = TRUE1; // t
|
|
}else if(n === 0x66){ this.tState = FALSE1; // f
|
|
}else if(n === 0x6e){ this.tState = NULL1; // n
|
|
}else if(n === 0x22){ // "
|
|
this.string = "";
|
|
this.stringBufferOffset = 0;
|
|
this.tState = STRING1;
|
|
}else if(n === 0x2d){ this.string = "-"; this.tState = NUMBER1; // -
|
|
}else{
|
|
if (n >= 0x30 && n < 0x40) { // 0-9
|
|
this.string = String.fromCharCode(n); this.tState = NUMBER3;
|
|
} else if (n === 0x20 || n === 0x09 || n === 0x0a || n === 0x0d) ; else {
|
|
return this.charError(buffer, i);
|
|
}
|
|
}
|
|
}else if (this.tState === STRING1){ // After open quote
|
|
n = buffer[i]; // get current byte from buffer
|
|
// check for carry over of a multi byte char split between data chunks
|
|
// & fill temp buffer with the start of this data chunk up to the boundary limit set in the last iteration
|
|
if (this.bytes_remaining > 0) {
|
|
for (var j = 0; j < this.bytes_remaining; j++) {
|
|
this.temp_buffs[this.bytes_in_sequence][this.bytes_in_sequence - this.bytes_remaining + j] = buffer[j];
|
|
}
|
|
|
|
this.appendStringBuf(this.temp_buffs[this.bytes_in_sequence]);
|
|
this.bytes_in_sequence = this.bytes_remaining = 0;
|
|
i = i + j - 1;
|
|
} else if (this.bytes_remaining === 0 && n >= 128) { // else if no remainder bytes carried over, parse multi byte (>=128) chars one at a time
|
|
if (n <= 193 || n > 244) {
|
|
return this.onError(new Error("Invalid UTF-8 character at position " + i + " in state " + Parser.toknam(this.tState)));
|
|
}
|
|
if ((n >= 194) && (n <= 223)) this.bytes_in_sequence = 2;
|
|
if ((n >= 224) && (n <= 239)) this.bytes_in_sequence = 3;
|
|
if ((n >= 240) && (n <= 244)) this.bytes_in_sequence = 4;
|
|
if ((this.bytes_in_sequence + i) > buffer.length) { // if bytes needed to complete char fall outside buffer length, we have a boundary split
|
|
for (var k = 0; k <= (buffer.length - 1 - i); k++) {
|
|
this.temp_buffs[this.bytes_in_sequence][k] = buffer[i + k]; // fill temp buffer of correct size with bytes available in this chunk
|
|
}
|
|
this.bytes_remaining = (i + this.bytes_in_sequence) - buffer.length;
|
|
i = buffer.length - 1;
|
|
} else {
|
|
this.appendStringBuf(buffer, i, i + this.bytes_in_sequence);
|
|
i = i + this.bytes_in_sequence - 1;
|
|
}
|
|
} else if (n === 0x22) {
|
|
this.tState = START;
|
|
this.string += this.stringBuffer.toString('utf8', 0, this.stringBufferOffset);
|
|
this.stringBufferOffset = 0;
|
|
this.onToken(STRING, this.string);
|
|
this.offset += Buffer.byteLength(this.string, 'utf8') + 1;
|
|
this.string = undefined;
|
|
}
|
|
else if (n === 0x5c) {
|
|
this.tState = STRING2;
|
|
}
|
|
else if (n >= 0x20) { this.appendStringChar(n); }
|
|
else {
|
|
return this.charError(buffer, i);
|
|
}
|
|
}else if (this.tState === STRING2){ // After backslash
|
|
n = buffer[i];
|
|
if(n === 0x22){ this.appendStringChar(n); this.tState = STRING1;
|
|
}else if(n === 0x5c){ this.appendStringChar(BACK_SLASH); this.tState = STRING1;
|
|
}else if(n === 0x2f){ this.appendStringChar(FORWARD_SLASH); this.tState = STRING1;
|
|
}else if(n === 0x62){ this.appendStringChar(BACKSPACE); this.tState = STRING1;
|
|
}else if(n === 0x66){ this.appendStringChar(FORM_FEED); this.tState = STRING1;
|
|
}else if(n === 0x6e){ this.appendStringChar(NEWLINE); this.tState = STRING1;
|
|
}else if(n === 0x72){ this.appendStringChar(CARRIAGE_RETURN); this.tState = STRING1;
|
|
}else if(n === 0x74){ this.appendStringChar(TAB); this.tState = STRING1;
|
|
}else if(n === 0x75){ this.unicode = ""; this.tState = STRING3;
|
|
}else{
|
|
return this.charError(buffer, i);
|
|
}
|
|
}else if (this.tState === STRING3 || this.tState === STRING4 || this.tState === STRING5 || this.tState === STRING6){ // unicode hex codes
|
|
n = buffer[i];
|
|
// 0-9 A-F a-f
|
|
if ((n >= 0x30 && n < 0x40) || (n > 0x40 && n <= 0x46) || (n > 0x60 && n <= 0x66)) {
|
|
this.unicode += String.fromCharCode(n);
|
|
if (this.tState++ === STRING6) {
|
|
var intVal = parseInt(this.unicode, 16);
|
|
this.unicode = undefined;
|
|
if (this.highSurrogate !== undefined && intVal >= 0xDC00 && intVal < (0xDFFF + 1)) { //<56320,57343> - lowSurrogate
|
|
this.appendStringBuf(new Buffer(String.fromCharCode(this.highSurrogate, intVal)));
|
|
this.highSurrogate = undefined;
|
|
} else if (this.highSurrogate === undefined && intVal >= 0xD800 && intVal < (0xDBFF + 1)) { //<55296,56319> - highSurrogate
|
|
this.highSurrogate = intVal;
|
|
} else {
|
|
if (this.highSurrogate !== undefined) {
|
|
this.appendStringBuf(new Buffer(String.fromCharCode(this.highSurrogate)));
|
|
this.highSurrogate = undefined;
|
|
}
|
|
this.appendStringBuf(new Buffer(String.fromCharCode(intVal)));
|
|
}
|
|
this.tState = STRING1;
|
|
}
|
|
} else {
|
|
return this.charError(buffer, i);
|
|
}
|
|
} else if (this.tState === NUMBER1 || this.tState === NUMBER3) {
|
|
n = buffer[i];
|
|
|
|
switch (n) {
|
|
case 0x30: // 0
|
|
case 0x31: // 1
|
|
case 0x32: // 2
|
|
case 0x33: // 3
|
|
case 0x34: // 4
|
|
case 0x35: // 5
|
|
case 0x36: // 6
|
|
case 0x37: // 7
|
|
case 0x38: // 8
|
|
case 0x39: // 9
|
|
case 0x2e: // .
|
|
case 0x65: // e
|
|
case 0x45: // E
|
|
case 0x2b: // +
|
|
case 0x2d: // -
|
|
this.string += String.fromCharCode(n);
|
|
this.tState = NUMBER3;
|
|
break;
|
|
default:
|
|
this.tState = START;
|
|
var result = Number(this.string);
|
|
|
|
if (isNaN(result)){
|
|
return this.charError(buffer, i);
|
|
}
|
|
|
|
if ((this.string.match(/[0-9]+/) == this.string) && (result.toString() != this.string)) {
|
|
// Long string of digits which is an ID string and not a valid and/or safe JavaScript integer Number
|
|
this.onToken(STRING, this.string);
|
|
} else {
|
|
this.onToken(NUMBER, result);
|
|
}
|
|
|
|
this.offset += this.string.length - 1;
|
|
this.string = undefined;
|
|
i--;
|
|
break;
|
|
}
|
|
}else if (this.tState === TRUE1){ // r
|
|
if (buffer[i] === 0x72) { this.tState = TRUE2; }
|
|
else { return this.charError(buffer, i); }
|
|
}else if (this.tState === TRUE2){ // u
|
|
if (buffer[i] === 0x75) { this.tState = TRUE3; }
|
|
else { return this.charError(buffer, i); }
|
|
}else if (this.tState === TRUE3){ // e
|
|
if (buffer[i] === 0x65) { this.tState = START; this.onToken(TRUE, true); this.offset+= 3; }
|
|
else { return this.charError(buffer, i); }
|
|
}else if (this.tState === FALSE1){ // a
|
|
if (buffer[i] === 0x61) { this.tState = FALSE2; }
|
|
else { return this.charError(buffer, i); }
|
|
}else if (this.tState === FALSE2){ // l
|
|
if (buffer[i] === 0x6c) { this.tState = FALSE3; }
|
|
else { return this.charError(buffer, i); }
|
|
}else if (this.tState === FALSE3){ // s
|
|
if (buffer[i] === 0x73) { this.tState = FALSE4; }
|
|
else { return this.charError(buffer, i); }
|
|
}else if (this.tState === FALSE4){ // e
|
|
if (buffer[i] === 0x65) { this.tState = START; this.onToken(FALSE, false); this.offset+= 4; }
|
|
else { return this.charError(buffer, i); }
|
|
}else if (this.tState === NULL1){ // u
|
|
if (buffer[i] === 0x75) { this.tState = NULL2; }
|
|
else { return this.charError(buffer, i); }
|
|
}else if (this.tState === NULL2){ // l
|
|
if (buffer[i] === 0x6c) { this.tState = NULL3; }
|
|
else { return this.charError(buffer, i); }
|
|
}else if (this.tState === NULL3){ // l
|
|
if (buffer[i] === 0x6c) { this.tState = START; this.onToken(NULL, null); this.offset += 3; }
|
|
else { return this.charError(buffer, i); }
|
|
}
|
|
}
|
|
};
|
|
proto.onToken = function (token, value) {
|
|
// Override this to get events
|
|
};
|
|
|
|
proto.parseError = function (token, value) {
|
|
this.tState = STOP;
|
|
this.onError(new Error("Unexpected " + Parser.toknam(token) + (value ? ("(" + JSON.stringify(value) + ")") : "") + " in state " + Parser.toknam(this.state)));
|
|
};
|
|
proto.push = function () {
|
|
this.stack.push({value: this.value, key: this.key, mode: this.mode});
|
|
};
|
|
proto.pop = function () {
|
|
var value = this.value;
|
|
var parent = this.stack.pop();
|
|
this.value = parent.value;
|
|
this.key = parent.key;
|
|
this.mode = parent.mode;
|
|
this.emit(value);
|
|
if (!this.mode) { this.state = VALUE; }
|
|
};
|
|
proto.emit = function (value) {
|
|
if (this.mode) { this.state = COMMA; }
|
|
this.onValue(value);
|
|
};
|
|
proto.onValue = function (value) {
|
|
// Override me
|
|
};
|
|
proto.onToken = function (token, value) {
|
|
if(this.state === VALUE){
|
|
if(token === STRING || token === NUMBER || token === TRUE || token === FALSE || token === NULL){
|
|
if (this.value) {
|
|
this.value[this.key] = value;
|
|
}
|
|
this.emit(value);
|
|
}else if(token === LEFT_BRACE){
|
|
this.push();
|
|
if (this.value) {
|
|
this.value = this.value[this.key] = {};
|
|
} else {
|
|
this.value = {};
|
|
}
|
|
this.key = undefined;
|
|
this.state = KEY;
|
|
this.mode = OBJECT;
|
|
}else if(token === LEFT_BRACKET){
|
|
this.push();
|
|
if (this.value) {
|
|
this.value = this.value[this.key] = [];
|
|
} else {
|
|
this.value = [];
|
|
}
|
|
this.key = 0;
|
|
this.mode = ARRAY;
|
|
this.state = VALUE;
|
|
}else if(token === RIGHT_BRACE){
|
|
if (this.mode === OBJECT) {
|
|
this.pop();
|
|
} else {
|
|
return this.parseError(token, value);
|
|
}
|
|
}else if(token === RIGHT_BRACKET){
|
|
if (this.mode === ARRAY) {
|
|
this.pop();
|
|
} else {
|
|
return this.parseError(token, value);
|
|
}
|
|
}else{
|
|
return this.parseError(token, value);
|
|
}
|
|
}else if(this.state === KEY){
|
|
if (token === STRING) {
|
|
this.key = value;
|
|
this.state = COLON;
|
|
} else if (token === RIGHT_BRACE) {
|
|
this.pop();
|
|
} else {
|
|
return this.parseError(token, value);
|
|
}
|
|
}else if(this.state === COLON){
|
|
if (token === COLON) { this.state = VALUE; }
|
|
else { return this.parseError(token, value); }
|
|
}else if(this.state === COMMA){
|
|
if (token === COMMA) {
|
|
if (this.mode === ARRAY) { this.key++; this.state = VALUE; }
|
|
else if (this.mode === OBJECT) { this.state = KEY; }
|
|
|
|
} else if (token === RIGHT_BRACKET && this.mode === ARRAY || token === RIGHT_BRACE && this.mode === OBJECT) {
|
|
this.pop();
|
|
} else {
|
|
return this.parseError(token, value);
|
|
}
|
|
}else{
|
|
return this.parseError(token, value);
|
|
}
|
|
};
|
|
|
|
Parser.C = C;
|
|
|
|
var jsonparse = Parser;
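// Illustrative sketch of the streaming JSON tokenizer/parser above: consumers
// override `onValue` (and optionally `onToken`/`onError`) and feed it chunks
// via `write`. Variable names are hypothetical; kept as a comment:
/*
var p = new jsonparse();
p.onValue = function (value) {
  // called for every completed value, innermost first;
  // `this.stack.length` is 0 when `value` is the root document
  if (this.stack.length === 0) console.log('root value:', value);
};
p.write('{"a": [1, 2');
p.write(', 3]}'); // chunks may split tokens anywhere
*/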
|
|
|
|
var Transform$1 = Stream.Transform;
|
|
|
|
var JSON2CSVTransform =
|
|
/*#__PURE__*/
|
|
function (_Transform) {
|
|
_inherits(JSON2CSVTransform, _Transform);
|
|
|
|
function JSON2CSVTransform(opts, transformOpts) {
|
|
var _this;
|
|
|
|
_classCallCheck(this, JSON2CSVTransform);
|
|
|
|
_this = _possibleConstructorReturn(this, _getPrototypeOf(JSON2CSVTransform).call(this, transformOpts)); // Inherit methods from JSON2CSVBase since extends doesn't
|
|
// allow multiple inheritance and manually preprocess opts
|
|
|
|
Object.getOwnPropertyNames(JSON2CSVBase_1.prototype).forEach(function (key) {
|
|
return _this[key] = JSON2CSVBase_1.prototype[key];
|
|
});
|
|
_this.opts = _this.preprocessOpts(opts);
|
|
_this._data = '';
|
|
_this._hasWritten = false;
|
|
|
|
if (_this._readableState.objectMode) {
|
|
_this.initObjectModeParse();
|
|
} else if (_this.opts.ndjson) {
|
|
_this.initNDJSONParse();
|
|
} else {
|
|
_this.initJSONParser();
|
|
}
|
|
|
|
if (_this.opts.withBOM) {
|
|
_this.push("\uFEFF");
|
|
}
|
|
|
|
if (_this.opts.fields) {
|
|
_this.opts.fields = _this.preprocessFieldsInfo(_this.opts.fields);
|
|
|
|
_this.pushHeader();
|
|
}
|
|
|
|
return _this;
|
|
}
|
|
/**
|
|
* Init the transform with a parser to process data in object mode.
|
|
* It receives JSON objects one by one and sends them to `pushLine` for processing.
|
|
*/
|
|
|
|
|
|
_createClass(JSON2CSVTransform, [{
|
|
key: "initObjectModeParse",
|
|
value: function initObjectModeParse() {
|
|
var transform = this;
|
|
this.parser = {
|
|
write: function write(line) {
|
|
transform.pushLine(line);
|
|
},
|
|
getPendingData: function getPendingData() {
|
|
return undefined;
|
|
}
|
|
};
|
|
}
|
|
/**
|
|
* Init the transform with a parser to process NDJSON data.
|
|
* It maintains a buffer of received data, parses each line
|
|
* as JSON and sends it to `pushLine` for processing.
|
|
*/
|
|
|
|
}, {
|
|
key: "initNDJSONParse",
|
|
value: function initNDJSONParse() {
|
|
var transform = this;
|
|
this.parser = {
|
|
_data: '',
|
|
write: function write(chunk) {
|
|
this._data += chunk.toString();
|
|
|
|
var lines = this._data.split('\n').map(function (line) {
|
|
return line.trim();
|
|
}).filter(function (line) {
|
|
return line !== '';
|
|
});
|
|
|
|
var pendingData = false;
|
|
lines.forEach(function (line, i) {
|
|
try {
|
|
transform.pushLine(JSON.parse(line));
|
|
} catch (e) {
|
|
if (i === lines.length - 1) {
|
|
pendingData = true;
|
|
} else {
|
|
e.message = "Invalid JSON (".concat(line, ")");
|
|
transform.emit('error', e);
|
|
}
|
|
}
|
|
});
|
|
this._data = pendingData ? this._data.slice(this._data.lastIndexOf('\n')) : '';
|
|
},
|
|
getPendingData: function getPendingData() {
|
|
return this._data;
|
|
}
|
|
};
|
|
}
|
|
/**
|
|
* Init the transform with a parser to process JSON data.
|
|
* It maintains a buffer of received data, parses it as JSON and sends
* each item (if the data is an array) or the data itself (otherwise)
* to `pushLine` for processing.
|
|
*/
|
|
|
|
}, {
|
|
key: "initJSONParser",
|
|
value: function initJSONParser() {
|
|
var transform = this;
|
|
this.parser = new jsonparse();
|
|
|
|
this.parser.onValue = function (value) {
|
|
if (this.stack.length !== this.depthToEmit) return;
|
|
transform.pushLine(value);
|
|
};
|
|
|
|
this.parser._onToken = this.parser.onToken;
|
|
|
|
this.parser.onToken = function (token, value) {
|
|
transform.parser._onToken(token, value);
|
|
|
|
if (this.stack.length === 0 && !transform.opts.fields && this.mode !== jsonparse.C.ARRAY && this.mode !== jsonparse.C.OBJECT) {
|
|
this.onError(new Error('Data should not be empty or the "fields" option should be included'));
|
|
}
|
|
|
|
if (this.stack.length === 1) {
|
|
if (this.depthToEmit === undefined) {
|
|
// If Array emit its content, else emit itself
|
|
this.depthToEmit = this.mode === jsonparse.C.ARRAY ? 1 : 0;
|
|
}
|
|
|
|
if (this.depthToEmit !== 0 && this.stack.length === 1) {
|
|
// No need to store the whole root array in memory
|
|
this.value = undefined;
|
|
}
|
|
}
|
|
};
|
|
|
|
this.parser.getPendingData = function () {
|
|
return this.value;
|
|
};
|
|
|
|
this.parser.onError = function (err) {
|
|
if (err.message.includes('Unexpected')) {
|
|
err.message = "Invalid JSON (".concat(err.message, ")");
|
|
}
|
|
|
|
transform.emit('error', err);
|
|
};
|
|
}
|
|
/**
|
|
* Main function that sends data to the parser to be processed.
|
|
*
|
|
* @param {Buffer} chunk Incoming data
|
|
* @param {String} encoding Encoding of the incoming data. Defaults to 'utf8'
|
|
* @param {Function} done Called when the processing of the supplied chunk is done
|
|
*/
|
|
|
|
}, {
|
|
key: "_transform",
|
|
value: function _transform(chunk, encoding, done) {
|
|
this.parser.write(chunk);
|
|
done();
|
|
}
|
|
}, {
|
|
key: "_flush",
|
|
value: function _flush(done) {
|
|
if (this.parser.getPendingData()) {
|
|
done(new Error('Invalid data received from stdin', this.parser.getPendingData()));
|
|
}
|
|
|
|
done();
|
|
}
|
|
/**
|
|
* Generates the CSV header and pushes it downstream.
|
|
*/
|
|
|
|
}, {
|
|
key: "pushHeader",
|
|
value: function pushHeader() {
|
|
if (this.opts.header) {
|
|
var header = this.getHeader();
|
|
this.emit('header', header);
|
|
this.push(header);
|
|
this._hasWritten = true;
|
|
}
|
|
}
|
|
/**
|
|
* Transforms incoming JSON data to CSV and pushes it downstream.
|
|
*
|
|
* @param {Object} data JSON object to be converted into a CSV row
|
|
*/
|
|
|
|
}, {
|
|
key: "pushLine",
|
|
value: function pushLine(data) {
|
|
var _this2 = this;
|
|
|
|
var processedData = this.preprocessRow(data);
|
|
|
|
if (!this._hasWritten) {
|
|
this.opts.fields = this.opts.fields || this.preprocessFieldsInfo(Object.keys(processedData[0]));
|
|
this.pushHeader();
|
|
}
|
|
|
|
processedData.forEach(function (row) {
|
|
var line = _this2.processRow(row, _this2.opts);
|
|
|
|
if (line === undefined) return;
|
|
|
|
_this2.emit('line', line);
|
|
|
|
_this2.push(_this2._hasWritten ? _this2.opts.eol + line : line);
|
|
|
|
_this2._hasWritten = true;
|
|
});
|
|
}
|
|
}]);
|
|
|
|
return JSON2CSVTransform;
|
|
}(Transform$1);
|
|
|
|
var JSON2CSVTransform_1 = JSON2CSVTransform;
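// Illustrative sketch of the stream transform above, piping a hypothetical
// readable JSON source into a hypothetical writable destination (kept as a comment):
/*
var json2csvTransform = new JSON2CSVTransform_1({ fields: ['car', 'price'] });
someReadableJsonStream          // hypothetical readable stream of JSON text
  .pipe(json2csvTransform)
  .pipe(someWritableCsvStream); // hypothetical writable destination
json2csvTransform
  .on('header', function (header) { console.log(header); })
  .on('line', function (line) { console.log(line); });
*/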
|
|
|
|
var Transform$2 = Stream.Transform;
|
|
var fastJoin$3 = utils.fastJoin;
|
|
|
|
var JSON2CSVAsyncParser =
|
|
/*#__PURE__*/
|
|
function () {
|
|
function JSON2CSVAsyncParser(opts, transformOpts) {
|
|
_classCallCheck(this, JSON2CSVAsyncParser);
|
|
|
|
this.input = new Transform$2(transformOpts);
|
|
|
|
this.input._read = function () {};
|
|
|
|
this.transform = new JSON2CSVTransform_1(opts, transformOpts);
|
|
this.processor = this.input.pipe(this.transform);
|
|
}
|
|
|
|
_createClass(JSON2CSVAsyncParser, [{
|
|
key: "fromInput",
|
|
value: function fromInput(input) {
|
|
if (this._input) {
|
|
throw new Error('Async parser already has an input.');
|
|
}
|
|
|
|
this._input = input;
|
|
this.input = this._input.pipe(this.processor);
|
|
return this;
|
|
}
|
|
}, {
|
|
key: "throughTransform",
|
|
value: function throughTransform(transform) {
|
|
if (this._output) {
|
|
throw new Error('Can\'t add transforms once an output has been added.');
|
|
}
|
|
|
|
this.processor = this.processor.pipe(transform);
|
|
return this;
|
|
}
|
|
}, {
|
|
key: "toOutput",
|
|
value: function toOutput(output) {
|
|
if (this._output) {
|
|
throw new Error('Async parser already has an output.');
|
|
}
|
|
|
|
this._output = output;
|
|
this.processor = this.processor.pipe(output);
|
|
return this;
|
|
}
|
|
}, {
|
|
key: "promise",
|
|
value: function promise() {
|
|
var _this = this;
|
|
|
|
var returnCSV = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : true;
|
|
return new Promise(function (resolve, reject) {
|
|
if (!returnCSV) {
|
|
_this.processor.on('finish', function () {
|
|
return resolve();
|
|
}).on('error', function (err) {
|
|
return reject(err);
|
|
});
|
|
|
|
return;
|
|
}
|
|
|
|
var csvBuffer = [];
|
|
|
|
_this.processor.on('data', function (chunk) {
|
|
return csvBuffer.push(chunk.toString());
|
|
}).on('finish', function () {
|
|
return resolve(fastJoin$3(csvBuffer, ''));
|
|
}).on('error', function (err) {
|
|
return reject(err);
|
|
});
|
|
});
|
|
}
|
|
}]);
|
|
|
|
return JSON2CSVAsyncParser;
|
|
}();
|
|
|
|
var JSON2CSVAsyncParser_1 = JSON2CSVAsyncParser;
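// Illustrative sketch of the async parser above. `fromInput`, `throughTransform`
// and `toOutput` just wire up pipes; `promise()` resolves with the whole CSV
// string (or with nothing when called as `promise(false)`). Hypothetical usage,
// kept as a comment:
/*
var asyncParser = new JSON2CSVAsyncParser_1({ fields: ['car', 'price'] });
asyncParser
  .fromInput(someReadableJsonStream) // hypothetical readable stream
  .promise()
  .then(function (csv) { console.log(csv); })
  .catch(function (err) { console.error(err); });
*/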
|
|
|
|
/**
|
|
* Performs the flattening of a data row recursively
|
|
*
|
|
* @param {String} separator Separator to be used as the flattened field name
|
|
* @returns {Object => Object} Flattened object
|
|
*/
|
|
function flatten() {
|
|
var _ref = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {},
|
|
_ref$objects = _ref.objects,
|
|
objects = _ref$objects === void 0 ? true : _ref$objects,
|
|
_ref$arrays = _ref.arrays,
|
|
arrays = _ref$arrays === void 0 ? false : _ref$arrays,
|
|
_ref$separator = _ref.separator,
|
|
separator = _ref$separator === void 0 ? '.' : _ref$separator;
|
|
|
|
function step(obj, flatDataRow, currentPath) {
|
|
Object.keys(obj).forEach(function (key) {
|
|
var newPath = currentPath ? "".concat(currentPath).concat(separator).concat(key) : key;
|
|
var value = obj[key];
|
|
|
|
if (objects && _typeof(value) === 'object' && value !== null && !Array.isArray(value) && Object.prototype.toString.call(value.toJSON) !== '[object Function]' && Object.keys(value).length) {
|
|
step(value, flatDataRow, newPath);
|
|
return;
|
|
}
|
|
|
|
if (arrays && Array.isArray(value)) {
|
|
step(value, flatDataRow, newPath);
|
|
return;
|
|
}
|
|
|
|
flatDataRow[newPath] = value;
|
|
});
|
|
return flatDataRow;
|
|
}
|
|
|
|
return function (dataRow) {
|
|
return step(dataRow, {});
|
|
};
|
|
}
|
|
|
|
var flatten_1 = flatten;
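// Illustrative sketch of the flatten transform above, applied to hypothetical
// nested rows (kept as a comment):
/*
var flattenRow = flatten_1({ separator: '_' });
flattenRow({ car: { make: 'Audi', specs: { hp: 150 } } });
// => { car_make: 'Audi', car_specs_hp: 150 }
flatten_1({ arrays: true })({ tags: ['a', 'b'] });
// => { 'tags.0': 'a', 'tags.1': 'b' }
*/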
|
|
|
|
var setProp$1 = utils.setProp,
|
|
unsetProp$1 = utils.unsetProp,
|
|
flattenReducer$3 = utils.flattenReducer;
|
|
|
|
function getUnwindablePaths(obj, currentPath) {
|
|
return Object.keys(obj).reduce(function (unwindablePaths, key) {
|
|
var newPath = currentPath ? "".concat(currentPath, ".").concat(key) : key;
|
|
var value = obj[key];
|
|
|
|
if (_typeof(value) === 'object' && value !== null && !Array.isArray(value) && Object.prototype.toString.call(value.toJSON) !== '[object Function]' && Object.keys(value).length) {
|
|
unwindablePaths = unwindablePaths.concat(getUnwindablePaths(value, newPath));
|
|
} else if (Array.isArray(value)) {
|
|
unwindablePaths.push(newPath);
|
|
unwindablePaths = unwindablePaths.concat(value.map(function (arrObj) {
|
|
return getUnwindablePaths(arrObj, newPath);
|
|
}).reduce(flattenReducer$3, []).filter(function (item, index, arr) {
|
|
return arr.indexOf(item) !== index;
|
|
}));
|
|
}
|
|
|
|
return unwindablePaths;
|
|
}, []);
|
|
}
|
|
/**
|
|
* Performs the unwind recursively in specified sequence
|
|
*
|
|
* @param {String[]} unwindPaths The paths as strings to be used to deconstruct the array
|
|
* @returns {Object => Array} Array of objects containing all rows after unwind of chosen paths
|
|
*/
|
|
|
|
|
|
function unwind() {
|
|
var _ref = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {},
|
|
_ref$paths = _ref.paths,
|
|
paths = _ref$paths === void 0 ? undefined : _ref$paths,
|
|
_ref$blankOut = _ref.blankOut,
|
|
blankOut = _ref$blankOut === void 0 ? false : _ref$blankOut;
|
|
|
|
function unwindReducer(rows, unwindPath) {
|
|
return rows.map(function (row) {
|
|
var unwindArray = lodash_get(row, unwindPath);
|
|
|
|
if (!Array.isArray(unwindArray)) {
|
|
return row;
|
|
}
|
|
|
|
if (!unwindArray.length) {
|
|
return unsetProp$1(row, unwindPath);
|
|
}
|
|
|
|
return unwindArray.map(function (unwindRow, index) {
|
|
var clonedRow = blankOut && index > 0 ? {} : row;
|
|
return setProp$1(clonedRow, unwindPath, unwindRow);
|
|
});
|
|
}).reduce(flattenReducer$3, []);
|
|
}
|
|
|
|
paths = Array.isArray(paths) ? paths : paths ? [paths] : undefined;
|
|
return function (dataRow) {
|
|
return (paths || getUnwindablePaths(dataRow)).reduce(unwindReducer, [dataRow]);
|
|
};
|
|
}
|
|
|
|
var unwind_1 = unwind;
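// Illustrative sketch of the unwind transform above, turning one row with an
// array into one row per array element (hypothetical data, kept as a comment):
/*
var unwindItems = unwind_1({ paths: ['items'] });
unwindItems({ order: 1, items: [{ sku: 'A' }, { sku: 'B' }] });
// => [
//      { order: 1, items: { sku: 'A' } },
//      { order: 1, items: { sku: 'B' } }
//    ]
*/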
|
|
|
|
var Readable$1 = Stream.Readable;
|
|
var Parser$1 = JSON2CSVParser_1;
|
|
var AsyncParser = JSON2CSVAsyncParser_1;
|
|
var Transform$3 = JSON2CSVTransform_1; // Convenience method to keep the API similar to version 3.X
|
|
|
|
var parse = function parse(data, opts) {
|
|
return new JSON2CSVParser_1(opts).parse(data);
|
|
};
|
|
|
|
var parseAsync = function parseAsync(data, opts, transformOpts) {
|
|
try {
|
|
if (!(data instanceof Readable$1)) {
|
|
transformOpts = Object.assign({}, transformOpts, {
|
|
objectMode: true
|
|
});
|
|
}
|
|
|
|
var asyncParser = new JSON2CSVAsyncParser_1(opts, transformOpts);
|
|
var promise = asyncParser.promise();
|
|
|
|
if (Array.isArray(data)) {
|
|
data.forEach(function (item) {
|
|
return asyncParser.input.push(item);
|
|
});
|
|
asyncParser.input.push(null);
|
|
} else if (data instanceof Readable$1) {
|
|
asyncParser.fromInput(data);
|
|
} else {
|
|
asyncParser.input.push(data);
|
|
asyncParser.input.push(null);
|
|
}
|
|
|
|
return promise;
|
|
} catch (err) {
|
|
return Promise.reject(err);
|
|
}
|
|
};
|
|
|
|
var transforms = {
|
|
flatten: flatten_1,
|
|
unwind: unwind_1
|
|
};
|
|
var json2csv = {
|
|
Parser: Parser$1,
|
|
AsyncParser: AsyncParser,
|
|
Transform: Transform$3,
|
|
parse: parse,
|
|
parseAsync: parseAsync,
|
|
transforms: transforms
|
|
};
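// Illustrative sketch of the convenience entry points exported below
// (hypothetical data and streams, kept as a comment):
/*
var csv = parse([{ car: 'Audi', price: 40000 }], { fields: ['car', 'price'] });

parseAsync(someReadableJsonStream, { fields: ['car', 'price'] })
  .then(function (csv) { console.log(csv); });

// Transforms are applied per row through the `transforms` option:
parse(data, {
  transforms: [
    transforms.unwind({ paths: ['items'] }),
    transforms.flatten({ separator: '_' })
  ]
});
*/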
|
|
|
|
exports.AsyncParser = AsyncParser;
|
|
exports.Parser = Parser$1;
|
|
exports.Transform = Transform$3;
|
|
exports.default = json2csv;
|
|
exports.parse = parse;
|
|
exports.parseAsync = parseAsync;
|
|
exports.transforms = transforms;
|
|
|
|
Object.defineProperty(exports, '__esModule', { value: true });
|
|
|
|
}));
|